repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
googleapis/google-api-java-client-services | 35,857 | clients/google-api-services-compute/v1/1.27.0/com/google/api/services/compute/model/Image.java | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* Represents an Image resource.
*
* You can use images to create boot disks for your VM instances. For more information, read Images.
* (== resource_for beta.images ==) (== resource_for v1.images ==)
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class Image extends com.google.api.client.json.GenericJson {
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long archiveSizeBytes;
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String creationTimestamp;
/**
* The deprecation status associated with this image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private DeprecationStatus deprecated;
/**
* An optional description of this resource. Provide this property when you create the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String description;
/**
* Size of the image when restored onto a persistent disk (in GB).
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.lang.Long diskSizeGb;
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String family;
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<GuestOsFeature> guestOsFeatures;
static {
// hack to force ProGuard to consider GuestOsFeature used, since otherwise it would be stripped out
// see https://github.com/google/google-api-java-client/issues/543
com.google.api.client.util.Data.nullOf(GuestOsFeature.class);
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.math.BigInteger id;
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey imageEncryptionKey;
/**
* [Output Only] Type of the resource. Always compute#image for images.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String labelFingerprint;
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.Map<String, java.lang.String> labels;
/**
* Integer license codes indicating which licenses are attached to this image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key @com.google.api.client.json.JsonString
private java.util.List<java.lang.Long> licenseCodes;
/**
* Any applicable license URI.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> licenses;
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* The parameters of the raw disk image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private RawDisk rawDisk;
/**
* [Output Only] Server-defined URL for the resource.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String selfLink;
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceDisk;
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceDiskEncryptionKey;
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceDiskId;
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImage;
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceImageEncryptionKey;
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceImageId;
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshot;
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CustomerEncryptionKey sourceSnapshotEncryptionKey;
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceSnapshotId;
/**
* The type of the image used to create this disk. The default and only value is RAW
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sourceType;
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String status;
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* @return value or {@code null} for none
*/
public java.lang.Long getArchiveSizeBytes() {
return archiveSizeBytes;
}
/**
* Size of the image tar.gz archive stored in Google Cloud Storage (in bytes).
* @param archiveSizeBytes archiveSizeBytes or {@code null} for none
*/
public Image setArchiveSizeBytes(java.lang.Long archiveSizeBytes) {
this.archiveSizeBytes = archiveSizeBytes;
return this;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @return value or {@code null} for none
*/
public java.lang.String getCreationTimestamp() {
return creationTimestamp;
}
/**
* [Output Only] Creation timestamp in RFC3339 text format.
* @param creationTimestamp creationTimestamp or {@code null} for none
*/
public Image setCreationTimestamp(java.lang.String creationTimestamp) {
this.creationTimestamp = creationTimestamp;
return this;
}
/**
* The deprecation status associated with this image.
* @return value or {@code null} for none
*/
public DeprecationStatus getDeprecated() {
return deprecated;
}
/**
* The deprecation status associated with this image.
* @param deprecated deprecated or {@code null} for none
*/
public Image setDeprecated(DeprecationStatus deprecated) {
this.deprecated = deprecated;
return this;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @return value or {@code null} for none
*/
public java.lang.String getDescription() {
return description;
}
/**
* An optional description of this resource. Provide this property when you create the resource.
* @param description description or {@code null} for none
*/
public Image setDescription(java.lang.String description) {
this.description = description;
return this;
}
/**
* Size of the image when restored onto a persistent disk (in GB).
* @return value or {@code null} for none
*/
public java.lang.Long getDiskSizeGb() {
return diskSizeGb;
}
/**
* Size of the image when restored onto a persistent disk (in GB).
* @param diskSizeGb diskSizeGb or {@code null} for none
*/
public Image setDiskSizeGb(java.lang.Long diskSizeGb) {
this.diskSizeGb = diskSizeGb;
return this;
}
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* @return value or {@code null} for none
*/
public java.lang.String getFamily() {
return family;
}
/**
* The name of the image family to which this image belongs. You can create disks by specifying an
* image family instead of a specific image name. The image family always returns its latest image
* that is not deprecated. The name of the image family must comply with RFC1035.
* @param family family or {@code null} for none
*/
public Image setFamily(java.lang.String family) {
this.family = family;
return this;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @return value or {@code null} for none
*/
public java.util.List<GuestOsFeature> getGuestOsFeatures() {
return guestOsFeatures;
}
/**
* A list of features to enable on the guest operating system. Applicable only for bootable
* images. Read Enabling guest operating system features to see a list of available options.
* @param guestOsFeatures guestOsFeatures or {@code null} for none
*/
public Image setGuestOsFeatures(java.util.List<GuestOsFeature> guestOsFeatures) {
this.guestOsFeatures = guestOsFeatures;
return this;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @return value or {@code null} for none
*/
public java.math.BigInteger getId() {
return id;
}
/**
* [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* @param id id or {@code null} for none
*/
public Image setId(java.math.BigInteger id) {
this.id = id;
return this;
}
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getImageEncryptionKey() {
return imageEncryptionKey;
}
/**
* Encrypts the image using a customer-supplied encryption key.
*
* After you encrypt an image with a customer-supplied key, you must provide the same key if you
* use the image later (e.g. to create a disk from the image).
*
* Customer-supplied encryption keys do not protect access to metadata of the disk.
*
* If you do not provide an encryption key when creating the image, then the disk will be
* encrypted using an automatically generated key and you do not need to provide a key to use the
* image later.
* @param imageEncryptionKey imageEncryptionKey or {@code null} for none
*/
public Image setImageEncryptionKey(CustomerEncryptionKey imageEncryptionKey) {
this.imageEncryptionKey = imageEncryptionKey;
return this;
}
/**
* [Output Only] Type of the resource. Always compute#image for images.
* @return value or {@code null} for none
*/
public java.lang.String getKind() {
return kind;
}
/**
* [Output Only] Type of the resource. Always compute#image for images.
* @param kind kind or {@code null} for none
*/
public Image setKind(java.lang.String kind) {
this.kind = kind;
return this;
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #decodeLabelFingerprint()
* @return value or {@code null} for none
*/
public java.lang.String getLabelFingerprint() {
return labelFingerprint;
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #getLabelFingerprint()
* @return Base64 decoded value or {@code null} for none
*
* @since 1.14
*/
public byte[] decodeLabelFingerprint() {
return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #encodeLabelFingerprint()
* @param labelFingerprint labelFingerprint or {@code null} for none
*/
public Image setLabelFingerprint(java.lang.String labelFingerprint) {
this.labelFingerprint = labelFingerprint;
return this;
}
/**
* A fingerprint for the labels being applied to this image, which is essentially a hash of the
* labels used for optimistic locking. The fingerprint is initially generated by Compute Engine
* and changes after every request to modify or update labels. You must always provide an up-to-
* date fingerprint hash in order to update or change labels, otherwise the request will fail with
* error 412 conditionNotMet.
*
* To see the latest fingerprint, make a get() request to retrieve an image.
* @see #setLabelFingerprint()
*
* <p>
* The value is encoded Base64 or {@code null} for none.
* </p>
*
* @since 1.14
*/
public Image encodeLabelFingerprint(byte[] labelFingerprint) {
this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
return this;
}
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* @return value or {@code null} for none
*/
public java.util.Map<String, java.lang.String> getLabels() {
return labels;
}
/**
* Labels to apply to this image. These can be later modified by the setLabels method.
* @param labels labels or {@code null} for none
*/
public Image setLabels(java.util.Map<String, java.lang.String> labels) {
this.labels = labels;
return this;
}
/**
* Integer license codes indicating which licenses are attached to this image.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.Long> getLicenseCodes() {
return licenseCodes;
}
/**
* Integer license codes indicating which licenses are attached to this image.
* @param licenseCodes licenseCodes or {@code null} for none
*/
public Image setLicenseCodes(java.util.List<java.lang.Long> licenseCodes) {
this.licenseCodes = licenseCodes;
return this;
}
/**
* Any applicable license URI.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getLicenses() {
return licenses;
}
/**
* Any applicable license URI.
* @param licenses licenses or {@code null} for none
*/
public Image setLicenses(java.util.List<java.lang.String> licenses) {
this.licenses = licenses;
return this;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Name of the resource; provided by the client when the resource is created. The name must be
* 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters
* long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` which means the first
* character must be a lowercase letter, and all following characters must be a dash, lowercase
* letter, or digit, except the last character, which cannot be a dash.
* @param name name or {@code null} for none
*/
public Image setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* The parameters of the raw disk image.
* @return value or {@code null} for none
*/
public RawDisk getRawDisk() {
return rawDisk;
}
/**
* The parameters of the raw disk image.
* @param rawDisk rawDisk or {@code null} for none
*/
public Image setRawDisk(RawDisk rawDisk) {
this.rawDisk = rawDisk;
return this;
}
/**
* [Output Only] Server-defined URL for the resource.
* @return value or {@code null} for none
*/
public java.lang.String getSelfLink() {
return selfLink;
}
/**
* [Output Only] Server-defined URL for the resource.
* @param selfLink selfLink or {@code null} for none
*/
public Image setSelfLink(java.lang.String selfLink) {
this.selfLink = selfLink;
return this;
}
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* @return value or {@code null} for none
*/
public java.lang.String getSourceDisk() {
return sourceDisk;
}
/**
* URL of the source disk used to create this image. This can be a full or valid partial URL. You
* must provide either this property or the rawDisk.source property but not both to create an
* image. For example, the following are valid values: -
* https://www.googleapis.com/compute/v1/projects/project/zones/zone/disks/disk -
* projects/project/zones/zone/disks/disk - zones/zone/disks/disk
* @param sourceDisk sourceDisk or {@code null} for none
*/
public Image setSourceDisk(java.lang.String sourceDisk) {
this.sourceDisk = sourceDisk;
return this;
}
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceDiskEncryptionKey() {
return sourceDiskEncryptionKey;
}
/**
* The customer-supplied encryption key of the source disk. Required if the source disk is
* protected by a customer-supplied encryption key.
* @param sourceDiskEncryptionKey sourceDiskEncryptionKey or {@code null} for none
*/
public Image setSourceDiskEncryptionKey(CustomerEncryptionKey sourceDiskEncryptionKey) {
this.sourceDiskEncryptionKey = sourceDiskEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceDiskId() {
return sourceDiskId;
}
/**
* [Output Only] The ID value of the disk used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given disk
* name.
* @param sourceDiskId sourceDiskId or {@code null} for none
*/
public Image setSourceDiskId(java.lang.String sourceDiskId) {
this.sourceDiskId = sourceDiskId;
return this;
}
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImage() {
return sourceImage;
}
/**
* URL of the source image used to create this image. This can be a full or valid partial URL. You
* must provide exactly one of: - this property, or - the rawDisk.source property, or - the
* sourceDisk property in order to create an image.
* @param sourceImage sourceImage or {@code null} for none
*/
public Image setSourceImage(java.lang.String sourceImage) {
this.sourceImage = sourceImage;
return this;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceImageEncryptionKey() {
return sourceImageEncryptionKey;
}
/**
* The customer-supplied encryption key of the source image. Required if the source image is
* protected by a customer-supplied encryption key.
* @param sourceImageEncryptionKey sourceImageEncryptionKey or {@code null} for none
*/
public Image setSourceImageEncryptionKey(CustomerEncryptionKey sourceImageEncryptionKey) {
this.sourceImageEncryptionKey = sourceImageEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceImageId() {
return sourceImageId;
}
/**
* [Output Only] The ID value of the image used to create this image. This value may be used to
* determine whether the image was taken from the current or a previous instance of a given image
* name.
* @param sourceImageId sourceImageId or {@code null} for none
*/
public Image setSourceImageId(java.lang.String sourceImageId) {
this.sourceImageId = sourceImageId;
return this;
}
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshot() {
return sourceSnapshot;
}
/**
* URL of the source snapshot used to create this image. This can be a full or valid partial URL.
* You must provide exactly one of: - this property, or - the sourceImage property, or - the
* rawDisk.source property, or - the sourceDisk property in order to create an image.
* @param sourceSnapshot sourceSnapshot or {@code null} for none
*/
public Image setSourceSnapshot(java.lang.String sourceSnapshot) {
this.sourceSnapshot = sourceSnapshot;
return this;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @return value or {@code null} for none
*/
public CustomerEncryptionKey getSourceSnapshotEncryptionKey() {
return sourceSnapshotEncryptionKey;
}
/**
* The customer-supplied encryption key of the source snapshot. Required if the source snapshot is
* protected by a customer-supplied encryption key.
* @param sourceSnapshotEncryptionKey sourceSnapshotEncryptionKey or {@code null} for none
*/
public Image setSourceSnapshotEncryptionKey(CustomerEncryptionKey sourceSnapshotEncryptionKey) {
this.sourceSnapshotEncryptionKey = sourceSnapshotEncryptionKey;
return this;
}
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* @return value or {@code null} for none
*/
public java.lang.String getSourceSnapshotId() {
return sourceSnapshotId;
}
/**
* [Output Only] The ID value of the snapshot used to create this image. This value may be used to
* determine whether the snapshot was taken from the current or a previous instance of a given
* snapshot name.
* @param sourceSnapshotId sourceSnapshotId or {@code null} for none
*/
public Image setSourceSnapshotId(java.lang.String sourceSnapshotId) {
this.sourceSnapshotId = sourceSnapshotId;
return this;
}
/**
* The type of the image used to create this disk. The default and only value is RAW
* @return value or {@code null} for none
*/
public java.lang.String getSourceType() {
return sourceType;
}
/**
* The type of the image used to create this disk. The default and only value is RAW
* @param sourceType sourceType or {@code null} for none
*/
public Image setSourceType(java.lang.String sourceType) {
this.sourceType = sourceType;
return this;
}
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* @return value or {@code null} for none
*/
public java.lang.String getStatus() {
return status;
}
/**
* [Output Only] The status of the image. An image can be used to create other resources, such as
* instances, only after the image has been successfully created and the status is set to READY.
* Possible values are FAILED, PENDING, or READY.
* @param status status or {@code null} for none
*/
public Image setStatus(java.lang.String status) {
this.status = status;
return this;
}
@Override
public Image set(String fieldName, Object value) {
return (Image) super.set(fieldName, value);
}
@Override
public Image clone() {
return (Image) super.clone();
}
/**
* The parameters of the raw disk image.
*/
public static final class RawDisk extends com.google.api.client.json.GenericJson {
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String containerType;
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String sha1Checksum;
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String source;
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* @return value or {@code null} for none
*/
public java.lang.String getContainerType() {
return containerType;
}
/**
* The format used to encode and transmit the block device, which should be TAR. This is just a
* container and transmission format and not a runtime format. Provided by the client when the
* disk image is created.
* @param containerType containerType or {@code null} for none
*/
public RawDisk setContainerType(java.lang.String containerType) {
this.containerType = containerType;
return this;
}
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* @return value or {@code null} for none
*/
public java.lang.String getSha1Checksum() {
return sha1Checksum;
}
/**
* [Deprecated] This field is deprecated. An optional SHA1 checksum of the disk image before
* unpackaging provided by the client when the disk image is created.
* @param sha1Checksum sha1Checksum or {@code null} for none
*/
public RawDisk setSha1Checksum(java.lang.String sha1Checksum) {
this.sha1Checksum = sha1Checksum;
return this;
}
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* @return value or {@code null} for none
*/
public java.lang.String getSource() {
return source;
}
/**
* The full Google Cloud Storage URL where the disk image is stored. You must provide either this
* property or the sourceDisk property but not both.
* @param source source or {@code null} for none
*/
public RawDisk setSource(java.lang.String source) {
this.source = source;
return this;
}
@Override
public RawDisk set(String fieldName, Object value) {
return (RawDisk) super.set(fieldName, value);
}
@Override
public RawDisk clone() {
return (RawDisk) super.clone();
}
}
}
|
googleapis/google-cloud-java | 35,736 | java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/SummarizationQualityInput.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Input for summarization quality metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.SummarizationQualityInput}
*/
public final class SummarizationQualityInput extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.SummarizationQualityInput)
SummarizationQualityInputOrBuilder {
private static final long serialVersionUID = 0L;
// Use SummarizationQualityInput.newBuilder() to construct.
private SummarizationQualityInput(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SummarizationQualityInput() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new SummarizationQualityInput();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SummarizationQualityInput_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SummarizationQualityInput_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput.class,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput.Builder.class);
}
private int bitField0_;
public static final int METRIC_SPEC_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metricSpec_;
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the metricSpec field is set.
*/
@java.lang.Override
public boolean hasMetricSpec() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The metricSpec.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec getMetricSpec() {
return metricSpec_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.getDefaultInstance()
: metricSpec_;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpecOrBuilder
getMetricSpecOrBuilder() {
return metricSpec_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.getDefaultInstance()
: metricSpec_;
}
public static final int INSTANCE_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance_;
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
@java.lang.Override
public boolean hasInstance() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance getInstance() {
return instance_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.getDefaultInstance()
: instance_;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstanceOrBuilder
getInstanceOrBuilder() {
return instance_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.getDefaultInstance()
: instance_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getMetricSpec());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getInstance());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput other =
(com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput) obj;
if (hasMetricSpec() != other.hasMetricSpec()) return false;
if (hasMetricSpec()) {
if (!getMetricSpec().equals(other.getMetricSpec())) return false;
}
if (hasInstance() != other.hasInstance()) return false;
if (hasInstance()) {
if (!getInstance().equals(other.getInstance())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMetricSpec()) {
hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER;
hash = (53 * hash) + getMetricSpec().hashCode();
}
if (hasInstance()) {
hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
hash = (53 * hash) + getInstance().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Input for summarization quality metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.SummarizationQualityInput}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.SummarizationQualityInput)
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInputOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SummarizationQualityInput_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SummarizationQualityInput_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput.class,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getMetricSpecFieldBuilder();
getInstanceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
metricSpec_ = null;
if (metricSpecBuilder_ != null) {
metricSpecBuilder_.dispose();
metricSpecBuilder_ = null;
}
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1beta1_SummarizationQualityInput_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput
getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput build() {
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput buildPartial() {
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput result =
new com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput other) {
if (other
== com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput.getDefaultInstance())
return this;
if (other.hasMetricSpec()) {
mergeMetricSpec(other.getMetricSpec());
}
if (other.hasInstance()) {
mergeInstance(other.getInstance());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metricSpec_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec,
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.Builder,
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpecOrBuilder>
metricSpecBuilder_;
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the metricSpec field is set.
*/
public boolean hasMetricSpec() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The metricSpec.
*/
public com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec getMetricSpec() {
if (metricSpecBuilder_ == null) {
return metricSpec_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.getDefaultInstance()
: metricSpec_;
} else {
return metricSpecBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setMetricSpec(
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec value) {
if (metricSpecBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
metricSpec_ = value;
} else {
metricSpecBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setMetricSpec(
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.Builder builderForValue) {
if (metricSpecBuilder_ == null) {
metricSpec_ = builderForValue.build();
} else {
metricSpecBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeMetricSpec(
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec value) {
if (metricSpecBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& metricSpec_ != null
&& metricSpec_
!= com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec
.getDefaultInstance()) {
getMetricSpecBuilder().mergeFrom(value);
} else {
metricSpec_ = value;
}
} else {
metricSpecBuilder_.mergeFrom(value);
}
if (metricSpec_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearMetricSpec() {
bitField0_ = (bitField0_ & ~0x00000001);
metricSpec_ = null;
if (metricSpecBuilder_ != null) {
metricSpecBuilder_.dispose();
metricSpecBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.Builder
getMetricSpecBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getMetricSpecFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpecOrBuilder
getMetricSpecOrBuilder() {
if (metricSpecBuilder_ != null) {
return metricSpecBuilder_.getMessageOrBuilder();
} else {
return metricSpec_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.getDefaultInstance()
: metricSpec_;
}
}
/**
*
*
* <pre>
* Required. Spec for summarization quality score metric.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec,
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.Builder,
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpecOrBuilder>
getMetricSpecFieldBuilder() {
if (metricSpecBuilder_ == null) {
metricSpecBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec,
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpec.Builder,
com.google.cloud.aiplatform.v1beta1.SummarizationQualitySpecOrBuilder>(
getMetricSpec(), getParentForChildren(), isClean());
metricSpec_ = null;
}
return metricSpecBuilder_;
}
private com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.Builder,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstanceOrBuilder>
instanceBuilder_;
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the instance field is set.
*/
public boolean hasInstance() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The instance.
*/
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance getInstance() {
if (instanceBuilder_ == null) {
return instance_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.getDefaultInstance()
: instance_;
} else {
return instanceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance value) {
if (instanceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
instance_ = value;
} else {
instanceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setInstance(
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.Builder builderForValue) {
if (instanceBuilder_ == null) {
instance_ = builderForValue.build();
} else {
instanceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeInstance(
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance value) {
if (instanceBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& instance_ != null
&& instance_
!= com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance
.getDefaultInstance()) {
getInstanceBuilder().mergeFrom(value);
} else {
instance_ = value;
}
} else {
instanceBuilder_.mergeFrom(value);
}
if (instance_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearInstance() {
bitField0_ = (bitField0_ & ~0x00000002);
instance_ = null;
if (instanceBuilder_ != null) {
instanceBuilder_.dispose();
instanceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.Builder
getInstanceBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getInstanceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstanceOrBuilder
getInstanceOrBuilder() {
if (instanceBuilder_ != null) {
return instanceBuilder_.getMessageOrBuilder();
} else {
return instance_ == null
? com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.getDefaultInstance()
: instance_;
}
}
/**
*
*
* <pre>
* Required. Summarization quality instance.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1beta1.SummarizationQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.Builder,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstanceOrBuilder>
getInstanceFieldBuilder() {
if (instanceBuilder_ == null) {
instanceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstance.Builder,
com.google.cloud.aiplatform.v1beta1.SummarizationQualityInstanceOrBuilder>(
getInstance(), getParentForChildren(), isClean());
instance_ = null;
}
return instanceBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.SummarizationQualityInput)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.SummarizationQualityInput)
private static final com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput();
}
public static com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SummarizationQualityInput> PARSER =
new com.google.protobuf.AbstractParser<SummarizationQualityInput>() {
@java.lang.Override
public SummarizationQualityInput parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the generated parser.
public static com.google.protobuf.Parser<SummarizationQualityInput> parser() {
  return PARSER;
}
// Instance accessor for the same parser, required by the Message interface.
@java.lang.Override
public com.google.protobuf.Parser<SummarizationQualityInput> getParserForType() {
  return PARSER;
}
// Instance accessor for the shared default instance, required by the Message interface.
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.SummarizationQualityInput getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/pinot | 35,893 | pinot-core/src/main/java/org/apache/pinot/core/util/DataBlockExtractUtils.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.core.util;
import com.google.common.base.Preconditions;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Nullable;
import org.apache.pinot.common.CustomObject;
import org.apache.pinot.common.datablock.DataBlock;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.common.utils.DataSchema.ColumnDataType;
import org.apache.pinot.core.query.aggregation.function.AggregationFunction;
import org.apache.pinot.spi.data.FieldSpec.DataType;
import org.apache.pinot.spi.utils.CommonConstants.NullValuePlaceHolder;
import org.apache.pinot.spi.utils.MapUtils;
import org.roaringbitmap.PeekableIntIterator;
import org.roaringbitmap.RoaringBitmap;
/**
* Utils to extract values from {@link DataBlock}.
*/
public final class DataBlockExtractUtils {
  private DataBlockExtractUtils() {
  }

  /**
   * Extracts all rows from the data block.
   *
   * @return one {@code Object[]} per row, aligned with the block's schema; entries for rows flagged in a column's
   *         null bitmap are left {@code null}
   */
  public static List<Object[]> extractRows(DataBlock dataBlock) {
    DataSchema dataSchema = dataBlock.getDataSchema();
    ColumnDataType[] storedTypes = dataSchema.getStoredColumnDataTypes();
    int numColumns = storedTypes.length;
    // Look up each column's null bitmap once instead of once per row.
    RoaringBitmap[] nullBitmaps = new RoaringBitmap[numColumns];
    for (int colId = 0; colId < numColumns; colId++) {
      nullBitmaps[colId] = dataBlock.getNullRowIds(colId);
    }
    int numRows = dataBlock.getNumberOfRows();
    List<Object[]> rows = new ArrayList<>(numRows);
    for (int rowId = 0; rowId < numRows; rowId++) {
      Object[] row = new Object[numColumns];
      for (int colId = 0; colId < numColumns; colId++) {
        RoaringBitmap nullBitmap = nullBitmaps[colId];
        if (nullBitmap == null || !nullBitmap.contains(rowId)) {
          row[colId] = extractValue(dataBlock, storedTypes[colId], rowId, colId);
        }
      }
      rows.add(row);
    }
    return rows;
  }

  /**
   * Reads a single value of the given stored type from the data block. MAP values are stored serialized and are
   * deserialized here; UNKNOWN always yields {@code null}.
   *
   * @throws IllegalStateException if the stored type is not supported
   */
  private static Object extractValue(DataBlock dataBlock, ColumnDataType storedType, int rowId, int colId) {
    switch (storedType) {
      // Single-value column
      case INT:
        return dataBlock.getInt(rowId, colId);
      case LONG:
        return dataBlock.getLong(rowId, colId);
      case FLOAT:
        return dataBlock.getFloat(rowId, colId);
      case DOUBLE:
        return dataBlock.getDouble(rowId, colId);
      case BIG_DECIMAL:
        return dataBlock.getBigDecimal(rowId, colId);
      case STRING:
        return dataBlock.getString(rowId, colId);
      case BYTES:
        return dataBlock.getBytes(rowId, colId);
      case MAP:
        return MapUtils.deserializeMap(dataBlock.getBytes(rowId, colId).getBytes());
      // Multi-value column
      case INT_ARRAY:
        return dataBlock.getIntArray(rowId, colId);
      case LONG_ARRAY:
        return dataBlock.getLongArray(rowId, colId);
      case FLOAT_ARRAY:
        return dataBlock.getFloatArray(rowId, colId);
      case DOUBLE_ARRAY:
        return dataBlock.getDoubleArray(rowId, colId);
      case STRING_ARRAY:
        return dataBlock.getStringArray(rowId, colId);
      // Null
      case UNKNOWN:
        return null;
      default:
        throw new IllegalStateException("Unsupported stored type: " + storedType + " for column: "
            + dataBlock.getDataSchema().getColumnName(colId));
    }
  }

  /**
   * Extracts the intermediate aggregation result for every row of the given column. For OBJECT columns the custom
   * object is deserialized via the aggregation function (its null bitmap is ignored because custom objects encode
   * null themselves); for all other columns null rows are left {@code null}.
   */
  public static Object[] extractAggResult(DataBlock dataBlock, int colId, AggregationFunction aggFunction) {
    DataSchema dataSchema = dataBlock.getDataSchema();
    ColumnDataType storedType = dataSchema.getColumnDataType(colId).getStoredType();
    int numRows = dataBlock.getNumberOfRows();
    Object[] values = new Object[numRows];
    if (storedType == ColumnDataType.OBJECT) {
      // Ignore null bitmap for custom object because null is supported in custom object
      for (int rowId = 0; rowId < numRows; rowId++) {
        CustomObject customObject = dataBlock.getCustomObject(rowId, colId);
        if (customObject != null) {
          values[rowId] = aggFunction.deserializeIntermediateResult(customObject);
        }
      }
    } else {
      RoaringBitmap nullBitmap = dataBlock.getNullRowIds(colId);
      for (int rowId = 0; rowId < numRows; rowId++) {
        if (nullBitmap == null || !nullBitmap.contains(rowId)) {
          values[rowId] = extractValue(dataBlock, storedType, rowId, colId);
        }
      }
    }
    return values;
  }

  /**
   * Extracts the group-by key columns for all rows. Null-flagged key values are left {@code null}.
   */
  public static Object[][] extractKeys(DataBlock dataBlock, int[] keyIds) {
    DataSchema dataSchema = dataBlock.getDataSchema();
    int numKeys = keyIds.length;
    ColumnDataType[] storedTypes = new ColumnDataType[numKeys];
    RoaringBitmap[] nullBitmaps = new RoaringBitmap[numKeys];
    for (int colId = 0; colId < numKeys; colId++) {
      storedTypes[colId] = dataSchema.getColumnDataType(keyIds[colId]).getStoredType();
      nullBitmaps[colId] = dataBlock.getNullRowIds(keyIds[colId]);
    }
    int numRows = dataBlock.getNumberOfRows();
    Object[][] keys = new Object[numRows][];
    for (int rowId = 0; rowId < numRows; rowId++) {
      keys[rowId] = extractKeyValues(dataBlock, keyIds, storedTypes, nullBitmaps, rowId);
    }
    return keys;
  }

  /**
   * Extracts the group-by key columns for the rows selected by {@code matchedBitmap} only. The result is indexed by
   * matched-row position, not by the original row id.
   */
  public static Object[][] extractKeys(DataBlock dataBlock, int[] keyIds, int numMatchedRows,
      RoaringBitmap matchedBitmap) {
    DataSchema dataSchema = dataBlock.getDataSchema();
    int numKeys = keyIds.length;
    ColumnDataType[] storedTypes = new ColumnDataType[numKeys];
    RoaringBitmap[] nullBitmaps = new RoaringBitmap[numKeys];
    for (int colId = 0; colId < numKeys; colId++) {
      storedTypes[colId] = dataSchema.getColumnDataType(keyIds[colId]).getStoredType();
      nullBitmaps[colId] = dataBlock.getNullRowIds(keyIds[colId]);
    }
    Object[][] keys = new Object[numMatchedRows][];
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      keys[matchedRowId] = extractKeyValues(dataBlock, keyIds, storedTypes, nullBitmaps, iterator.next());
    }
    return keys;
  }

  /** Extracts the key values for a single row, leaving null-flagged columns as {@code null}. */
  private static Object[] extractKeyValues(DataBlock dataBlock, int[] keyIds, ColumnDataType[] storedTypes,
      RoaringBitmap[] nullBitmaps, int rowId) {
    int numKeys = keyIds.length;
    Object[] values = new Object[numKeys];
    for (int colId = 0; colId < numKeys; colId++) {
      RoaringBitmap nullBitmap = nullBitmaps[colId];
      if (nullBitmap == null || !nullBitmap.contains(rowId)) {
        values[colId] = extractValue(dataBlock, storedTypes[colId], rowId, keyIds[colId]);
      }
    }
    return values;
  }

  /**
   * Extracts a single key column for all rows. Null-flagged rows are left {@code null}.
   */
  public static Object[] extractKey(DataBlock dataBlock, int colId) {
    ColumnDataType storedType = dataBlock.getDataSchema().getColumnDataType(colId).getStoredType();
    RoaringBitmap nullBitmap = dataBlock.getNullRowIds(colId);
    int numRows = dataBlock.getNumberOfRows();
    Object[] values = new Object[numRows];
    for (int rowId = 0; rowId < numRows; rowId++) {
      if (nullBitmap == null || !nullBitmap.contains(rowId)) {
        values[rowId] = extractValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a single key column for the rows selected by {@code matchedBitmap}. The result is indexed by
   * matched-row position; null-flagged rows are left {@code null}.
   */
  public static Object[] extractKey(DataBlock dataBlock, int colId, int numMatchedRows,
      RoaringBitmap matchedBitmap) {
    ColumnDataType storedType = dataBlock.getDataSchema().getColumnDataType(colId).getStoredType();
    RoaringBitmap nullBitmap = dataBlock.getNullRowIds(colId);
    Object[] values = new Object[numMatchedRows];
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      if (nullBitmap == null || !nullBitmap.contains(rowId)) {
        values[matchedRowId] = extractValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts an {@code int[]} column, narrowing from any numeric stored type. Null rows (and UNKNOWN columns) are
   * left at the primitive default {@code 0}.
   *
   * @throws IllegalStateException if the stored type is not numeric
   */
  public static int[] extractIntColumn(DataType storedType, DataBlock dataBlock, int colId,
      @Nullable RoaringBitmap nullBitmap) {
    int numRows = dataBlock.getNumberOfRows();
    int[] values = new int[numRows];
    if (numRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "int");
    for (int rowId = 0; rowId < numRows; rowId++) {
      if (nullBitmap == null || !nullBitmap.contains(rowId)) {
        values[rowId] = extractIntValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts an {@code int[]} column for the rows selected by {@code matchedBitmap}. {@code matchedNullBitmap} is
   * indexed by matched-row position; null entries are left at {@code 0}.
   */
  public static int[] extractIntColumn(DataType storedType, DataBlock dataBlock, int colId, int numMatchedRows,
      RoaringBitmap matchedBitmap, @Nullable RoaringBitmap matchedNullBitmap) {
    int[] values = new int[numMatchedRows];
    if (numMatchedRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "int");
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      if (matchedNullBitmap == null || !matchedNullBitmap.contains(matchedRowId)) {
        values[matchedRowId] = extractIntValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a {@code long[]} column, converting from any numeric stored type. Null rows (and UNKNOWN columns) are
   * left at {@code 0L}.
   */
  public static long[] extractLongColumn(DataType storedType, DataBlock dataBlock, int colId,
      @Nullable RoaringBitmap nullBitmap) {
    int numRows = dataBlock.getNumberOfRows();
    long[] values = new long[numRows];
    if (numRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "long");
    for (int rowId = 0; rowId < numRows; rowId++) {
      if (nullBitmap == null || !nullBitmap.contains(rowId)) {
        values[rowId] = extractLongValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a {@code long[]} column for the rows selected by {@code matchedBitmap}. {@code matchedNullBitmap} is
   * indexed by matched-row position; null entries are left at {@code 0L}.
   */
  public static long[] extractLongColumn(DataType storedType, DataBlock dataBlock, int colId, int numMatchedRows,
      RoaringBitmap matchedBitmap, @Nullable RoaringBitmap matchedNullBitmap) {
    long[] values = new long[numMatchedRows];
    if (numMatchedRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "long");
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      if (matchedNullBitmap == null || !matchedNullBitmap.contains(matchedRowId)) {
        values[matchedRowId] = extractLongValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a {@code float[]} column, converting from any numeric stored type. Null rows (and UNKNOWN columns) are
   * left at {@code 0f}.
   */
  public static float[] extractFloatColumn(DataType storedType, DataBlock dataBlock, int colId,
      @Nullable RoaringBitmap nullBitmap) {
    int numRows = dataBlock.getNumberOfRows();
    float[] values = new float[numRows];
    if (numRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "float");
    for (int rowId = 0; rowId < numRows; rowId++) {
      if (nullBitmap == null || !nullBitmap.contains(rowId)) {
        values[rowId] = extractFloatValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a {@code float[]} column for the rows selected by {@code matchedBitmap}. {@code matchedNullBitmap} is
   * indexed by matched-row position; null entries are left at {@code 0f}.
   */
  public static float[] extractFloatColumn(DataType storedType, DataBlock dataBlock, int colId, int numMatchedRows,
      RoaringBitmap matchedBitmap, @Nullable RoaringBitmap matchedNullBitmap) {
    float[] values = new float[numMatchedRows];
    if (numMatchedRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "float");
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      if (matchedNullBitmap == null || !matchedNullBitmap.contains(matchedRowId)) {
        values[matchedRowId] = extractFloatValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a {@code double[]} column, converting from any numeric stored type. Null rows (and UNKNOWN columns) are
   * left at {@code 0d}.
   */
  public static double[] extractDoubleColumn(DataType storedType, DataBlock dataBlock, int colId,
      @Nullable RoaringBitmap nullBitmap) {
    int numRows = dataBlock.getNumberOfRows();
    double[] values = new double[numRows];
    if (numRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "double");
    for (int rowId = 0; rowId < numRows; rowId++) {
      if (nullBitmap == null || !nullBitmap.contains(rowId)) {
        values[rowId] = extractDoubleValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a {@code double[]} column for the rows selected by {@code matchedBitmap}. {@code matchedNullBitmap} is
   * indexed by matched-row position; null entries are left at {@code 0d}.
   */
  public static double[] extractDoubleColumn(DataType storedType, DataBlock dataBlock, int colId, int numMatchedRows,
      RoaringBitmap matchedBitmap, @Nullable RoaringBitmap matchedNullBitmap) {
    double[] values = new double[numMatchedRows];
    if (numMatchedRows == 0 || storedType == DataType.UNKNOWN) {
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "double");
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      if (matchedNullBitmap == null || !matchedNullBitmap.contains(matchedRowId)) {
        values[matchedRowId] = extractDoubleValue(dataBlock, storedType, rowId, colId);
      }
    }
    return values;
  }

  /**
   * Extracts a {@code BigDecimal[]} column, converting from any numeric stored type. Null rows and UNKNOWN columns
   * are filled with {@link NullValuePlaceHolder#BIG_DECIMAL}.
   */
  public static BigDecimal[] extractBigDecimalColumn(DataType storedType, DataBlock dataBlock, int colId,
      @Nullable RoaringBitmap nullBitmap) {
    int numRows = dataBlock.getNumberOfRows();
    BigDecimal[] values = new BigDecimal[numRows];
    if (numRows == 0) {
      return values;
    }
    if (storedType == DataType.UNKNOWN) {
      Arrays.fill(values, NullValuePlaceHolder.BIG_DECIMAL);
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "BigDecimal");
    for (int rowId = 0; rowId < numRows; rowId++) {
      values[rowId] = nullBitmap == null || !nullBitmap.contains(rowId)
          ? extractBigDecimalValue(dataBlock, storedType, rowId, colId) : NullValuePlaceHolder.BIG_DECIMAL;
    }
    return values;
  }

  /**
   * Extracts a {@code BigDecimal[]} column for the rows selected by {@code matchedBitmap}. {@code matchedNullBitmap}
   * is indexed by matched-row position; null entries are filled with {@link NullValuePlaceHolder#BIG_DECIMAL}.
   */
  public static BigDecimal[] extractBigDecimalColumn(DataType storedType, DataBlock dataBlock, int colId,
      int numMatchedRows, RoaringBitmap matchedBitmap, @Nullable RoaringBitmap matchedNullBitmap) {
    BigDecimal[] values = new BigDecimal[numMatchedRows];
    if (numMatchedRows == 0) {
      return values;
    }
    if (storedType == DataType.UNKNOWN) {
      Arrays.fill(values, NullValuePlaceHolder.BIG_DECIMAL);
      return values;
    }
    validateNumericType(storedType, dataBlock, colId, "BigDecimal");
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      values[matchedRowId] = matchedNullBitmap == null || !matchedNullBitmap.contains(matchedRowId)
          ? extractBigDecimalValue(dataBlock, storedType, rowId, colId) : NullValuePlaceHolder.BIG_DECIMAL;
    }
    return values;
  }

  /**
   * Extracts a {@code String[]} column. Null rows and UNKNOWN columns are filled with
   * {@link NullValuePlaceHolder#STRING}.
   *
   * @throws IllegalStateException if the stored type is neither STRING nor UNKNOWN
   */
  public static String[] extractStringColumn(DataType storedType, DataBlock dataBlock, int colId,
      @Nullable RoaringBitmap nullBitmap) {
    int numRows = dataBlock.getNumberOfRows();
    String[] values = new String[numRows];
    if (numRows == 0) {
      return values;
    }
    if (storedType == DataType.UNKNOWN) {
      Arrays.fill(values, NullValuePlaceHolder.STRING);
      return values;
    }
    Preconditions.checkState(storedType == DataType.STRING,
        "Cannot extract String values for column: %s with stored type: %s",
        dataBlock.getDataSchema().getColumnName(colId), storedType);
    for (int rowId = 0; rowId < numRows; rowId++) {
      values[rowId] = nullBitmap == null || !nullBitmap.contains(rowId) ? dataBlock.getString(rowId, colId)
          : NullValuePlaceHolder.STRING;
    }
    return values;
  }

  /**
   * Extracts a {@code String[]} column for the rows selected by {@code matchedBitmap}. {@code matchedNullBitmap} is
   * indexed by matched-row position; null entries are filled with {@link NullValuePlaceHolder#STRING}.
   */
  public static String[] extractStringColumn(DataType storedType, DataBlock dataBlock, int colId, int numMatchedRows,
      RoaringBitmap matchedBitmap, @Nullable RoaringBitmap matchedNullBitmap) {
    String[] values = new String[numMatchedRows];
    if (numMatchedRows == 0) {
      return values;
    }
    if (storedType == DataType.UNKNOWN) {
      Arrays.fill(values, NullValuePlaceHolder.STRING);
      return values;
    }
    Preconditions.checkState(storedType == DataType.STRING,
        "Cannot extract String values for column: %s with stored type: %s",
        dataBlock.getDataSchema().getColumnName(colId), storedType);
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      values[matchedRowId] = matchedNullBitmap == null || !matchedNullBitmap.contains(matchedRowId)
          ? dataBlock.getString(rowId, colId) : NullValuePlaceHolder.STRING;
    }
    return values;
  }

  /**
   * Extracts a {@code byte[][]} column. Null rows and UNKNOWN columns are filled with
   * {@link NullValuePlaceHolder#BYTES}.
   *
   * @throws IllegalStateException if the stored type is neither BYTES nor UNKNOWN
   */
  public static byte[][] extractBytesColumn(DataType storedType, DataBlock dataBlock, int colId,
      @Nullable RoaringBitmap nullBitmap) {
    int numRows = dataBlock.getNumberOfRows();
    byte[][] values = new byte[numRows][];
    if (numRows == 0) {
      return values;
    }
    if (storedType == DataType.UNKNOWN) {
      Arrays.fill(values, NullValuePlaceHolder.BYTES);
      return values;
    }
    Preconditions.checkState(storedType == DataType.BYTES,
        "Cannot extract byte[] values for column: %s with stored type: %s",
        dataBlock.getDataSchema().getColumnName(colId), storedType);
    for (int rowId = 0; rowId < numRows; rowId++) {
      values[rowId] = nullBitmap == null || !nullBitmap.contains(rowId) ? dataBlock.getBytes(rowId, colId).getBytes()
          : NullValuePlaceHolder.BYTES;
    }
    return values;
  }

  /**
   * Extracts a {@code byte[][]} column for the rows selected by {@code matchedBitmap}. {@code matchedNullBitmap} is
   * indexed by matched-row position; null entries are filled with {@link NullValuePlaceHolder#BYTES}.
   */
  public static byte[][] extractBytesColumn(DataType storedType, DataBlock dataBlock, int colId, int numMatchedRows,
      RoaringBitmap matchedBitmap, @Nullable RoaringBitmap matchedNullBitmap) {
    byte[][] values = new byte[numMatchedRows][];
    if (numMatchedRows == 0) {
      return values;
    }
    if (storedType == DataType.UNKNOWN) {
      Arrays.fill(values, NullValuePlaceHolder.BYTES);
      return values;
    }
    Preconditions.checkState(storedType == DataType.BYTES,
        "Cannot extract byte[] values for column: %s with stored type: %s",
        dataBlock.getDataSchema().getColumnName(colId), storedType);
    PeekableIntIterator iterator = matchedBitmap.getIntIterator();
    for (int matchedRowId = 0; matchedRowId < numMatchedRows; matchedRowId++) {
      int rowId = iterator.next();
      values[matchedRowId] = matchedNullBitmap == null || !matchedNullBitmap.contains(matchedRowId)
          ? dataBlock.getBytes(rowId, colId).getBytes() : NullValuePlaceHolder.BYTES;
    }
    return values;
  }

  /**
   * Validates that the stored type can be converted to the requested numeric target type. Centralizes the error
   * message shared by all numeric column extractors.
   *
   * @throws IllegalStateException if the stored type is not INT, LONG, FLOAT, DOUBLE or BIG_DECIMAL
   */
  private static void validateNumericType(DataType storedType, DataBlock dataBlock, int colId, String targetType) {
    switch (storedType) {
      case INT:
      case LONG:
      case FLOAT:
      case DOUBLE:
      case BIG_DECIMAL:
        break;
      default:
        throw new IllegalStateException("Cannot extract " + targetType + " values for column: "
            + dataBlock.getDataSchema().getColumnName(colId) + " with stored type: " + storedType);
    }
  }

  /** Reads one value as {@code int}, narrowing from the numeric stored type. */
  private static int extractIntValue(DataBlock dataBlock, DataType storedType, int rowId, int colId) {
    switch (storedType) {
      case INT:
        return dataBlock.getInt(rowId, colId);
      case LONG:
        return (int) dataBlock.getLong(rowId, colId);
      case FLOAT:
        return (int) dataBlock.getFloat(rowId, colId);
      case DOUBLE:
        return (int) dataBlock.getDouble(rowId, colId);
      case BIG_DECIMAL:
        return dataBlock.getBigDecimal(rowId, colId).intValue();
      default:
        throw new IllegalStateException("Unsupported numeric stored type: " + storedType);
    }
  }

  /** Reads one value as {@code long}, narrowing from the numeric stored type. */
  private static long extractLongValue(DataBlock dataBlock, DataType storedType, int rowId, int colId) {
    switch (storedType) {
      case INT:
        return dataBlock.getInt(rowId, colId);
      case LONG:
        return dataBlock.getLong(rowId, colId);
      case FLOAT:
        return (long) dataBlock.getFloat(rowId, colId);
      case DOUBLE:
        return (long) dataBlock.getDouble(rowId, colId);
      case BIG_DECIMAL:
        return dataBlock.getBigDecimal(rowId, colId).longValue();
      default:
        throw new IllegalStateException("Unsupported numeric stored type: " + storedType);
    }
  }

  /** Reads one value as {@code float}, converting from the numeric stored type. */
  private static float extractFloatValue(DataBlock dataBlock, DataType storedType, int rowId, int colId) {
    switch (storedType) {
      case INT:
        return dataBlock.getInt(rowId, colId);
      case LONG:
        return dataBlock.getLong(rowId, colId);
      case FLOAT:
        return dataBlock.getFloat(rowId, colId);
      case DOUBLE:
        return (float) dataBlock.getDouble(rowId, colId);
      case BIG_DECIMAL:
        return dataBlock.getBigDecimal(rowId, colId).floatValue();
      default:
        throw new IllegalStateException("Unsupported numeric stored type: " + storedType);
    }
  }

  /** Reads one value as {@code double}, converting from the numeric stored type. */
  private static double extractDoubleValue(DataBlock dataBlock, DataType storedType, int rowId, int colId) {
    switch (storedType) {
      case INT:
        return dataBlock.getInt(rowId, colId);
      case LONG:
        return dataBlock.getLong(rowId, colId);
      case FLOAT:
        return dataBlock.getFloat(rowId, colId);
      case DOUBLE:
        return dataBlock.getDouble(rowId, colId);
      case BIG_DECIMAL:
        return dataBlock.getBigDecimal(rowId, colId).doubleValue();
      default:
        throw new IllegalStateException("Unsupported numeric stored type: " + storedType);
    }
  }

  /** Reads one value as {@code BigDecimal}, converting from the numeric stored type. */
  private static BigDecimal extractBigDecimalValue(DataBlock dataBlock, DataType storedType, int rowId, int colId) {
    switch (storedType) {
      case INT:
        return BigDecimal.valueOf(dataBlock.getInt(rowId, colId));
      case LONG:
        return BigDecimal.valueOf(dataBlock.getLong(rowId, colId));
      case FLOAT:
        // Use Float.toString() so the decimal value matches the float's shortest decimal form.
        // BigDecimal.valueOf(float) would widen to double first and pick up binary-representation
        // noise (e.g. 0.1f -> 0.100000001490116...); the old code used both conversions depending
        // on the null-bitmap path, producing different BigDecimals for the same stored float.
        return new BigDecimal(Float.toString(dataBlock.getFloat(rowId, colId)));
      case DOUBLE:
        return BigDecimal.valueOf(dataBlock.getDouble(rowId, colId));
      case BIG_DECIMAL:
        return dataBlock.getBigDecimal(rowId, colId);
      default:
        throw new IllegalStateException("Unsupported numeric stored type: " + storedType);
    }
  }
}
|
google/j2objc | 35,193 | jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/java/nio/charset/CharsetTest.java | /* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.tests.java.nio.charset;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CharsetEncoder;
import java.nio.charset.CoderResult;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.spi.CharsetProvider;
import java.nio.charset.UnsupportedCharsetException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Locale;
import java.util.Set;
import java.util.Vector;
import libcore.java.nio.charset.SettableCharsetProvider;
import junit.framework.TestCase;
/**
* Test class java.nio.Charset.
*/
public class CharsetTest extends TestCase {
public void test_allAvailableCharsets() throws Exception {
  // Every available charset must yield a working decoder, and a working encoder (with a
  // non-null replacement) when it claims to support encoding.
  for (String charsetName : Charset.availableCharsets().keySet()) {
    // Our UTF-32 is broken. http://b/2702411
    // TODO: remove this hack when UTF-32 is fixed.
    if ("UTF-32".equals(charsetName)) {
      continue;
    }
    Charset charset = Charset.forName(charsetName);
    assertNotNull(charset.newDecoder());
    if (charset.canEncode()) {
      CharsetEncoder encoder = charset.newEncoder();
      assertNotNull(encoder);
      assertNotNull(encoder.replacement());
    }
  }
}
public void test_defaultCharset() {
  // Android's platform default charset is always UTF-8.
  Charset defaultCharset = Charset.defaultCharset();
  assertEquals("UTF-8", defaultCharset.name());
}
public void test_isRegistered() {
  // Regression for HARMONY-45
  // By convention, canonical names starting with "x-" mark charsets that are NOT registered
  // with IANA; all other available charsets are expected to report isRegistered() == true.
  // (Rewritten with typed enhanced-for loops instead of raw-type Iterators and casts, and
  // without the leftover System.err debug print that duplicated the assertion message.)
  Set<String> knownRegisteredCharsets = new HashSet<String>();
  Set<String> unknownRegisteredCharsets = new HashSet<String>();
  for (String name : Charset.availableCharsets().keySet()) {
    if (name.toLowerCase(Locale.ROOT).startsWith("x-")) {
      unknownRegisteredCharsets.add(name);
    } else {
      knownRegisteredCharsets.add(name);
    }
  }
  for (String name : knownRegisteredCharsets) {
    Charset cs = Charset.forName(name);
    assertTrue("isRegistered was false for " + name + " " + cs.name() + " " + cs.aliases(), cs.isRegistered());
  }
  for (String name : unknownRegisteredCharsets) {
    Charset cs = Charset.forName(name);
    assertFalse("isRegistered was true for " + name + " " + cs.name() + " " + cs.aliases(), cs.isRegistered());
  }
}
public void test_guaranteedCharsetsAvailable() throws Exception {
// All Java implementations must support these charsets.
assertNotNull(Charset.forName("ISO-8859-1"));
assertNotNull(Charset.forName("US-ASCII"));
assertNotNull(Charset.forName("UTF-16"));
assertNotNull(Charset.forName("UTF-16BE"));
assertNotNull(Charset.forName("UTF-16LE"));
assertNotNull(Charset.forName("UTF-8"));
}
// http://code.google.com/p/android/issues/detail?id=42769
public void test_42769() throws Exception {
ArrayList<Thread> threads = new ArrayList<Thread>();
for (int i = 0; i < 10; ++i) {
Thread t = new Thread(new Runnable() {
public void run() {
for (int i = 0; i < 50; ++i) {
Charset.availableCharsets();
}
}
});
threads.add(t);
}
for (Thread t : threads) {
t.start();
}
for (Thread t : threads) {
t.join();
}
}
public void test_have_canonical_EUC_JP() throws Exception {
assertEquals("EUC-JP", Charset.forName("EUC-JP").name());
}
/* j2objc: b/139491456
public void test_EUC_JP_replacement_character() throws Exception {
// We have text either side of the replacement character, because all kinds of errors
// could lead to a replacement character being returned.
assertEncodes(Charset.forName("EUC-JP"), " \ufffd ", ' ', 0xf4, 0xfe, ' ');
assertDecodes(Charset.forName("EUC-JP"), " \ufffd ", ' ', 0xf4, 0xfe, ' ');
}
*/
/* j2objc: iOS doesn't support SCSU charset.
public void test_SCSU_replacement_character() throws Exception {
// We have text either side of the replacement character, because all kinds of errors
// could lead to a replacement character being returned.
assertEncodes(Charset.forName("SCSU"), " \ufffd ", ' ', 14, 0xff, 0xfd, ' ');
assertDecodes(Charset.forName("SCSU"), " \ufffd ", ' ', 14, 0xff, 0xfd, ' ');
}
*/
/* j2objc: b/139491456
public void test_Shift_JIS_replacement_character() throws Exception {
// We have text either side of the replacement character, because all kinds of errors
// could lead to a replacement character being returned.
assertEncodes(Charset.forName("Shift_JIS"), " \ufffd ", ' ', 0xfc, 0xfc, ' ');
assertDecodes(Charset.forName("Shift_JIS"), " \ufffd ", ' ', 0xfc, 0xfc, ' ');
}
*/
public void test_UTF_16() throws Exception {
Charset cs = Charset.forName("UTF-16");
// Writes big-endian, with a big-endian BOM.
assertEncodes(cs, "a\u0666", 0xfe, 0xff, 0, 'a', 0x06, 0x66);
// Reads whatever the BOM tells it to read...
assertDecodes(cs, "a\u0666", 0xfe, 0xff, 0, 'a', 0x06, 0x66);
assertDecodes(cs, "a\u0666", 0xff, 0xfe, 'a', 0, 0x66, 0x06);
// ...and defaults to reading big-endian if there's no BOM.
assertDecodes(cs, "a\u0666", 0, 'a', 0x06, 0x66);
}
/* j2objc: b/139491456
public void test_UTF_16BE() throws Exception {
Charset cs = Charset.forName("UTF-16BE");
// Writes big-endian, with no BOM.
assertEncodes(cs, "a\u0666", 0, 'a', 0x06, 0x66);
// Treats a little-endian BOM as an error and continues to read big-endian.
// This test uses REPLACE mode, so we get the U+FFFD replacement character in the result.
assertDecodes(cs, "\ufffda\u0666", 0xff, 0xfe, 0, 'a', 0x06, 0x66);
// Accepts a big-endian BOM and includes U+FEFF in the decoded output.
assertDecodes(cs, "\ufeffa\u0666", 0xfe, 0xff, 0, 'a', 0x06, 0x66);
// Defaults to reading big-endian.
assertDecodes(cs, "a\u0666", 0, 'a', 0x06, 0x66);
}
*/
/* j2objc: b/139491456
public void test_UTF_16LE() throws Exception {
Charset cs = Charset.forName("UTF-16LE");
// Writes little-endian, with no BOM.
assertEncodes(cs, "a\u0666", 'a', 0, 0x66, 0x06);
// Accepts a little-endian BOM and includes U+FEFF in the decoded output.
assertDecodes(cs, "\ufeffa\u0666", 0xff, 0xfe, 'a', 0, 0x66, 0x06);
// Treats a big-endian BOM as an error and continues to read little-endian.
// This test uses REPLACE mode, so we get the U+FFFD replacement character in the result.
assertDecodes(cs, "\ufffda\u0666", 0xfe, 0xff, 'a', 0, 0x66, 0x06);
// Defaults to reading little-endian.
assertDecodes(cs, "a\u0666", 'a', 0, 0x66, 0x06);
}
*/
/* j2objc: iOS doesn't support x-UTF-16LE-BOM charset.
public void test_x_UTF_16LE_BOM() throws Exception {
Charset cs = Charset.forName("x-UTF-16LE-BOM");
// Writes little-endian, with a BOM.
assertEncodes(cs, "a\u0666", 0xff, 0xfe, 'a', 0, 0x66, 0x06);
// Accepts a little-endian BOM and swallows the BOM.
assertDecodes(cs, "a\u0666", 0xff, 0xfe, 'a', 0, 0x66, 0x06);
// Swallows a big-endian BOM, but continues to read little-endian!
assertDecodes(cs, "\u6100\u6606", 0xfe, 0xff, 'a', 0, 0x66, 0x06);
// Defaults to reading little-endian.
assertDecodes(cs, "a\u0666", 'a', 0, 0x66, 0x06);
}
*/
/* j2objc: b/139491456
public void test_UTF_32() throws Exception {
Charset cs = Charset.forName("UTF-32");
// Writes big-endian, with no BOM.
assertEncodes(cs, "a\u0666", 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
// Reads whatever the BOM tells it to read...
assertDecodes(cs, "a\u0666", 0, 0, 0xfe, 0xff, 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
assertDecodes(cs, "a\u0666", 0xff, 0xfe, 0, 0, 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
// ...and defaults to reading big-endian if there's no BOM.
assertDecodes(cs, "a\u0666", 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
}
*/
/* j2objc: b/139491456
public void test_UTF_32BE() throws Exception {
Charset cs = Charset.forName("UTF-32BE");
// Writes big-endian, with no BOM.
assertEncodes(cs, "a\u0666", 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
// Treats a little-endian BOM as an error and continues to read big-endian.
// This test uses REPLACE mode, so we get the U+FFFD replacement character in the result.
assertDecodes(cs, "\ufffda\u0666", 0xff, 0xfe, 0, 0, 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
// Accepts a big-endian BOM and swallows the BOM.
assertDecodes(cs, "a\u0666", 0, 0, 0xfe, 0xff, 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
// Defaults to reading big-endian.
assertDecodes(cs, "a\u0666", 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
}
*/
/* j2objc: b/139491456
public void test_UTF_32LE() throws Exception {
Charset cs = Charset.forName("UTF-32LE");
// Writes little-endian, with no BOM.
assertEncodes(cs, "a\u0666", 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
// Accepts a little-endian BOM and swallows the BOM.
assertDecodes(cs, "a\u0666", 0xff, 0xfe, 0, 0, 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
// Treats a big-endian BOM as an error and continues to read little-endian.
// This test uses REPLACE mode, so we get the U+FFFD replacement character in the result.
assertDecodes(cs, "\ufffda\u0666", 0, 0, 0xfe, 0xff, 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
// Defaults to reading little-endian.
assertDecodes(cs, "a\u0666", 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
}
*/
/* j2objc: iOS doesn't support X-UTF-32BE-BOM charset.
public void test_X_UTF_32BE_BOM() throws Exception {
Charset cs = Charset.forName("X-UTF-32BE-BOM");
// Writes big-endian, with a big-endian BOM.
assertEncodes(cs, "a\u0666", 0, 0, 0xfe, 0xff, 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
// Treats a little-endian BOM as an error and continues to read big-endian.
// This test uses REPLACE mode, so we get the U+FFFD replacement character in the result.
assertDecodes(cs, "\ufffda\u0666", 0xff, 0xfe, 0, 0, 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
// Swallows a big-endian BOM, and continues to read big-endian.
assertDecodes(cs, "a\u0666", 0, 0, 0xfe, 0xff, 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
// Defaults to reading big-endian.
assertDecodes(cs, "a\u0666", 0, 0, 0, 'a', 0, 0, 0x06, 0x66);
}
*/
/* j2objc: iOS doesn't support X-UTF-32LE-BOM charset.
public void test_X_UTF_32LE_BOM() throws Exception {
Charset cs = Charset.forName("X-UTF-32LE-BOM");
// Writes little-endian, with a little-endian BOM.
assertEncodes(cs, "a\u0666", 0xff, 0xfe, 0, 0, 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
// Accepts a little-endian BOM and swallows the BOM.
assertDecodes(cs, "a\u0666", 0xff, 0xfe, 0, 0, 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
// Treats a big-endian BOM as an error and continues to read little-endian.
// This test uses REPLACE mode, so we get the U+FFFD replacement character in the result.
assertDecodes(cs, "\ufffda\u0666", 0, 0, 0xfe, 0xff, 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
// Defaults to reading little-endian.
assertDecodes(cs, "a\u0666", 'a', 0, 0, 0, 0x66, 0x06, 0, 0);
}
*/
private byte[] toByteArray(int[] ints) {
byte[] result = new byte[ints.length];
for (int i = 0; i < ints.length; ++i) {
result[i] = (byte) ints[i];
}
return result;
}
private void assertEncodes(Charset cs, String s, int... expectedByteInts) throws Exception {
ByteBuffer out = cs.encode(s);
byte[] bytes = new byte[out.remaining()];
out.get(bytes);
assertEquals(Arrays.toString(toByteArray(expectedByteInts)), Arrays.toString(bytes));
}
private void assertDecodes(Charset cs, String s, int... byteInts) throws Exception {
ByteBuffer in = ByteBuffer.wrap(toByteArray(byteInts));
CharBuffer out = cs.decode(in);
assertEquals(s, out.toString());
}
public void test_forNameLjava_lang_String() {
// Invoke forName two times with the same canonical name.
// It should return the same reference.
Charset cs1 = Charset.forName("UTF-8");
Charset cs2 = Charset.forName("UTF-8");
assertSame(cs1, cs2);
// test forName: invoke forName two times for the same Charset using
// canonical name and alias, it should return the same reference.
Charset cs3 = Charset.forName("ASCII");
Charset cs4 = Charset.forName("US-ASCII");
assertSame(cs3, cs4);
}
static MockCharset charset1 = new MockCharset("mockCharset00",
new String[] { "mockCharset01", "mockCharset02" });
static MockCharset charset2 = new MockCharset("mockCharset10",
new String[] { "mockCharset11", "mockCharset12" });
// Test the required 6 charsets are supported.
public void testRequiredCharsetSupported() {
assertTrue(Charset.isSupported("US-ASCII"));
assertTrue(Charset.isSupported("ASCII"));
assertTrue(Charset.isSupported("ISO-8859-1"));
assertTrue(Charset.isSupported("ISO8859_1"));
assertTrue(Charset.isSupported("UTF-8"));
assertTrue(Charset.isSupported("UTF8"));
assertTrue(Charset.isSupported("UTF-16"));
assertTrue(Charset.isSupported("UTF-16BE"));
assertTrue(Charset.isSupported("UTF-16LE"));
Charset c1 = Charset.forName("US-ASCII");
assertEquals("US-ASCII", Charset.forName("US-ASCII").name());
assertEquals("US-ASCII", Charset.forName("ASCII").name());
assertEquals("ISO-8859-1", Charset.forName("ISO-8859-1").name());
assertEquals("ISO-8859-1", Charset.forName("ISO8859_1").name());
assertEquals("UTF-8", Charset.forName("UTF-8").name());
assertEquals("UTF-8", Charset.forName("UTF8").name());
assertEquals("UTF-16", Charset.forName("UTF-16").name());
assertEquals("UTF-16BE", Charset.forName("UTF-16BE").name());
assertEquals("UTF-16LE", Charset.forName("UTF-16LE").name());
assertNotSame(Charset.availableCharsets(), Charset.availableCharsets());
// assertSame(Charset.forName("US-ASCII"), Charset.availableCharsets().get("US-ASCII"));
// assertSame(Charset.forName("US-ASCII"), c1);
assertTrue(Charset.availableCharsets().containsKey("US-ASCII"));
assertTrue(Charset.availableCharsets().containsKey("ISO-8859-1"));
assertTrue(Charset.availableCharsets().containsKey("UTF-8"));
assertTrue(Charset.availableCharsets().containsKey("UTF-16"));
assertTrue(Charset.availableCharsets().containsKey("UTF-16BE"));
assertTrue(Charset.availableCharsets().containsKey("UTF-16LE"));
}
public void testIsSupported_Null() {
try {
Charset.isSupported(null);
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testIsSupported_EmptyString() {
try {
Charset.isSupported("");
fail();
} catch (IllegalArgumentException expected) {
}
}
/* j2objc: b/139491456
public void testIsSupported_InvalidInitialCharacter() {
try {
Charset.isSupported(".char");
fail();
} catch (IllegalArgumentException expected) {
}
}
*/
public void testIsSupported_IllegalName() {
try {
Charset.isSupported(" ///#$$");
fail();
} catch (IllegalCharsetNameException expected) {
}
}
public void testIsSupported_NotSupported() {
assertFalse(Charset.isSupported("well-formed-name-of-a-charset-that-does-not-exist"));
}
public void testForName_Null() {
try {
Charset.forName(null);
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testForName_EmptyString() {
try {
Charset.forName("");
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testForName_InvalidInitialCharacter() {
try {
Charset.forName(".char");
fail();
} catch (IllegalArgumentException expected) {
}
}
public void testForName_IllegalName() {
try {
Charset.forName(" ///#$$");
fail();
} catch (IllegalCharsetNameException expected) {
}
}
public void testForName_NotSupported() {
try {
Charset.forName("impossible");
fail();
} catch (UnsupportedCharsetException expected) {
}
}
public void testConstructor_Normal() {
final String mockName = "mockChar1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ.:-_";
MockCharset c = new MockCharset(mockName, new String[] { "mock" });
assertEquals(mockName, c.name());
assertEquals(mockName, c.displayName());
assertEquals(mockName, c.displayName(Locale.getDefault()));
assertEquals("mock", c.aliases().toArray()[0]);
assertEquals(1, c.aliases().toArray().length);
}
public void testConstructor_EmptyCanonicalName() {
try {
new MockCharset("", new String[0]);
fail();
} catch (IllegalCharsetNameException expected) {
}
}
/* j2objc: b/139491456
public void testConstructor_IllegalCanonicalName_Initial() {
try {
new MockCharset("-123", new String[] { "mock" });
fail();
} catch (IllegalCharsetNameException expected) {
}
}
*/
public void testConstructor_IllegalCanonicalName_Middle() {
try {
new MockCharset("1%%23", new String[] { "mock" });
fail();
} catch (IllegalCharsetNameException expected) {
}
try {
new MockCharset("1//23", new String[] { "mock" });
fail();
} catch (IllegalCharsetNameException expected) {
}
}
public void testConstructor_NullCanonicalName() {
try {
MockCharset c = new MockCharset(null, new String[] { "mock" });
fail();
} catch (NullPointerException expected) {
}
}
public void testConstructor_NullAliases() {
MockCharset c = new MockCharset("mockChar", null);
assertEquals("mockChar", c.name());
assertEquals("mockChar", c.displayName());
assertEquals("mockChar", c.displayName(Locale.getDefault()));
assertEquals(0, c.aliases().toArray().length);
}
public void testConstructor_NullAliase() {
try {
new MockCharset("mockChar", new String[] { "mock", null });
fail();
} catch (NullPointerException expected) {
}
}
public void testConstructor_NoAliases() {
MockCharset c = new MockCharset("mockChar", new String[0]);
assertEquals("mockChar", c.name());
assertEquals("mockChar", c.displayName());
assertEquals("mockChar", c.displayName(Locale.getDefault()));
assertEquals(0, c.aliases().toArray().length);
}
public void testConstructor_EmptyAliases() {
try {
new MockCharset("mockChar", new String[] { "" });
fail();
} catch (IllegalCharsetNameException expected) {
}
}
// Test the constructor with illegal aliases: starting with neither a digit nor a letter.
/* j2objc: b/139491456
public void testConstructor_IllegalAliases_Initial() {
try {
new MockCharset("mockChar", new String[] { "mock", "-123" });
fail();
} catch (IllegalCharsetNameException e) {
}
}
*/
public void testConstructor_IllegalAliases_Middle() {
try {
new MockCharset("mockChar", new String[] { "mock", "22##ab" });
fail();
} catch (IllegalCharsetNameException expected) {
}
try {
new MockCharset("mockChar", new String[] { "mock", "22%%ab" });
fail();
} catch (IllegalCharsetNameException expected) {
}
}
public void testAliases_Multiple() {
final String mockName = "mockChar1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ.:-_";
MockCharset c = new MockCharset("mockChar", new String[] { "mock", mockName, "mock2" });
assertEquals("mockChar", c.name());
assertEquals(3, c.aliases().size());
assertTrue(c.aliases().contains("mock"));
assertTrue(c.aliases().contains(mockName));
assertTrue(c.aliases().contains("mock2"));
try {
c.aliases().clear();
fail();
} catch (UnsupportedOperationException expected) {
}
}
public void testAliases_Duplicate() {
final String mockName = "mockChar1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ.:-_";
MockCharset c = new MockCharset("mockChar", new String[] { "mockChar",
"mock", mockName, "mock", "mockChar", "mock", "mock2" });
assertEquals("mockChar", c.name());
assertEquals(4, c.aliases().size());
assertTrue(c.aliases().contains("mockChar"));
assertTrue(c.aliases().contains("mock"));
assertTrue(c.aliases().contains(mockName));
assertTrue(c.aliases().contains("mock2"));
}
public void testCanEncode() {
MockCharset c = new MockCharset("mock", null);
assertTrue(c.canEncode());
}
public void testIsRegistered() {
MockCharset c = new MockCharset("mock", null);
assertTrue(c.isRegistered());
}
public void testDisplayName_Locale_Null() {
MockCharset c = new MockCharset("mock", null);
assertEquals("mock", c.displayName(null));
}
public void testCompareTo_Normal() {
MockCharset c1 = new MockCharset("mock", null);
assertEquals(0, c1.compareTo(c1));
MockCharset c2 = new MockCharset("Mock", null);
assertEquals(0, c1.compareTo(c2));
c2 = new MockCharset("mock2", null);
assertTrue(c1.compareTo(c2) < 0);
assertTrue(c2.compareTo(c1) > 0);
c2 = new MockCharset("mack", null);
assertTrue(c1.compareTo(c2) > 0);
assertTrue(c2.compareTo(c1) < 0);
c2 = new MockCharset("m.", null);
assertTrue(c1.compareTo(c2) > 0);
assertTrue(c2.compareTo(c1) < 0);
c2 = new MockCharset("m:", null);
assertEquals("mock".compareToIgnoreCase("m:"), c1.compareTo(c2));
assertEquals("m:".compareToIgnoreCase("mock"), c2.compareTo(c1));
c2 = new MockCharset("m-", null);
assertTrue(c1.compareTo(c2) > 0);
assertTrue(c2.compareTo(c1) < 0);
c2 = new MockCharset("m_", null);
assertTrue(c1.compareTo(c2) > 0);
assertTrue(c2.compareTo(c1) < 0);
}
public void testCompareTo_Null() {
MockCharset c1 = new MockCharset("mock", null);
try {
c1.compareTo(null);
fail();
} catch (NullPointerException expected) {
}
}
public void testCompareTo_DiffCharsetClass() {
MockCharset c1 = new MockCharset("mock", null);
MockCharset2 c2 = new MockCharset2("Mock", new String[] { "myname" });
assertEquals(0, c1.compareTo(c2));
assertEquals(0, c2.compareTo(c1));
}
public void testEquals_Normal() {
MockCharset c1 = new MockCharset("mock", null);
MockCharset2 c2 = new MockCharset2("mock", null);
assertTrue(c1.equals(c2));
assertTrue(c2.equals(c1));
c2 = new MockCharset2("Mock", null);
assertFalse(c1.equals(c2));
assertFalse(c2.equals(c1));
}
public void testEquals_Null() {
MockCharset c1 = new MockCharset("mock", null);
assertFalse(c1.equals(null));
}
public void testEquals_NonCharsetObject() {
MockCharset c1 = new MockCharset("mock", null);
assertFalse(c1.equals("test"));
}
public void testEquals_DiffCharsetClass() {
MockCharset c1 = new MockCharset("mock", null);
MockCharset2 c2 = new MockCharset2("mock", null);
assertTrue(c1.equals(c2));
assertTrue(c2.equals(c1));
}
public void testHashCode_DiffCharsetClass() {
MockCharset c1 = new MockCharset("mock", null);
assertEquals(c1.hashCode(), "mock".hashCode());
final String mockName = "mockChar1234567890abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ.:-_";
c1 = new MockCharset(mockName, new String[] { "mockChar", "mock",
mockName, "mock", "mockChar", "mock", "mock2" });
assertEquals(mockName.hashCode(), c1.hashCode());
}
public void testEncode_CharBuffer_Normal() throws Exception {
MockCharset c1 = new MockCharset("testEncode_CharBuffer_Normal_mock", null);
ByteBuffer bb = c1.encode(CharBuffer.wrap("abcdefg"));
assertEquals("abcdefg", new String(bb.array(), "iso8859-1"));
bb = c1.encode(CharBuffer.wrap(""));
assertEquals("", new String(bb.array(), "iso8859-1"));
}
public void testEncode_CharBuffer_Unmappable() throws Exception {
Charset c1 = Charset.forName("iso8859-1");
ByteBuffer bb = c1.encode(CharBuffer.wrap("abcd\u5D14efg"));
assertEquals(new String(bb.array(), "iso8859-1"),
"abcd" + new String(c1.newEncoder().replacement(), "iso8859-1") + "efg");
}
public void testEncode_CharBuffer_NullCharBuffer() {
MockCharset c = new MockCharset("mock", null);
try {
c.encode((CharBuffer) null);
fail();
} catch (NullPointerException expected) {
}
}
public void testEncode_CharBuffer_NullEncoder() {
MockCharset2 c = new MockCharset2("mock2", null);
try {
c.encode(CharBuffer.wrap("hehe"));
fail();
} catch (NullPointerException expected) {
}
}
public void testEncode_String_Normal() throws Exception {
MockCharset c1 = new MockCharset("testEncode_String_Normal_mock", null);
ByteBuffer bb = c1.encode("abcdefg");
assertEquals("abcdefg", new String(bb.array(), "iso8859-1"));
bb = c1.encode("");
assertEquals("", new String(bb.array(), "iso8859-1"));
}
public void testEncode_String_Unmappable() throws Exception {
Charset c1 = Charset.forName("iso8859-1");
ByteBuffer bb = c1.encode("abcd\u5D14efg");
assertEquals(new String(bb.array(), "iso8859-1"),
"abcd" + new String(c1.newEncoder().replacement(), "iso8859-1") + "efg");
}
public void testEncode_String_NullString() {
MockCharset c = new MockCharset("mock", null);
try {
c.encode((String) null);
fail();
} catch (NullPointerException expected) {
}
}
public void testEncode_String_NullEncoder() {
MockCharset2 c = new MockCharset2("mock2", null);
try {
c.encode("hehe");
fail();
} catch (NullPointerException expected) {
}
}
public void testDecode_Normal() throws Exception {
MockCharset c1 = new MockCharset("mock", null);
CharBuffer cb = c1.decode(ByteBuffer.wrap("abcdefg".getBytes("iso8859-1")));
assertEquals("abcdefg", new String(cb.array()));
cb = c1.decode(ByteBuffer.wrap("".getBytes("iso8859-1")));
assertEquals("", new String(cb.array()));
}
public void testDecode_Malformed() throws Exception {
Charset c1 = Charset.forName("iso8859-1");
CharBuffer cb = c1.decode(ByteBuffer.wrap("abcd\u5D14efg".getBytes("iso8859-1")));
byte[] replacement = c1.newEncoder().replacement();
assertEquals(new String(cb.array()).trim(), "abcd" + new String(replacement, "iso8859-1") + "efg");
}
public void testDecode_NullByteBuffer() {
MockCharset c = new MockCharset("mock", null);
try {
c.decode(null);
fail();
} catch (NullPointerException expected) {
}
}
public void testDecode_NullDecoder() {
MockCharset2 c = new MockCharset2("mock2", null);
try {
c.decode(ByteBuffer.wrap("hehe".getBytes()));
fail();
} catch (NullPointerException expected) {
}
}
public void testToString() {
MockCharset c1 = new MockCharset("mock", null);
assertTrue(-1 != c1.toString().indexOf("mock"));
}
static final class MockCharset extends Charset {
public MockCharset(String canonicalName, String[] aliases) {
super(canonicalName, aliases);
}
public boolean contains(Charset cs) {
return false;
}
public CharsetDecoder newDecoder() {
return new MockDecoder(this);
}
public CharsetEncoder newEncoder() {
return new MockEncoder(this);
}
}
static class MockCharset2 extends Charset {
public MockCharset2(String canonicalName, String[] aliases) {
super(canonicalName, aliases);
}
public boolean contains(Charset cs) {
return false;
}
public CharsetDecoder newDecoder() {
return null;
}
public CharsetEncoder newEncoder() {
return null;
}
}
static class MockEncoder extends java.nio.charset.CharsetEncoder {
public MockEncoder(Charset cs) {
super(cs, 1, 3, new byte[] { (byte) '?' });
}
protected CoderResult encodeLoop(CharBuffer in, ByteBuffer out) {
while (in.remaining() > 0) {
out.put((byte) in.get());
// out.put((byte) '!');
}
return CoderResult.UNDERFLOW;
}
}
static class MockDecoder extends java.nio.charset.CharsetDecoder {
public MockDecoder(Charset cs) {
super(cs, 1, 10);
}
protected CoderResult decodeLoop(ByteBuffer in, CharBuffer out) {
while (in.remaining() > 0) {
out.put((char) in.get());
}
return CoderResult.UNDERFLOW;
}
}
// Test the method isSupported(String) with charset supported by multiple providers.
/* j2objc: b/139491456
public void testIsSupported_And_ForName_NormalProvider() throws Exception {
SettableCharsetProvider.setDelegate(new MockCharsetProvider());
try {
assertTrue(Charset.isSupported("mockCharset10"));
// ignore case problem in mock, intended
assertTrue(Charset.isSupported("MockCharset11"));
assertTrue(Charset.isSupported("MockCharset12"));
assertTrue(Charset.isSupported("MOCKCharset10"));
// intended case problem in mock
assertTrue(Charset.isSupported("MOCKCharset11"));
assertTrue(Charset.isSupported("MOCKCharset12"));
assertTrue(Charset.forName("mockCharset10") instanceof MockCharset);
assertTrue(Charset.forName("mockCharset11") instanceof MockCharset);
assertTrue(Charset.forName("mockCharset12") instanceof MockCharset);
assertTrue(Charset.forName("mockCharset10") == charset2);
// intended case problem in mock
Charset.forName("mockCharset11");
assertTrue(Charset.forName("mockCharset12") == charset2);
} finally {
SettableCharsetProvider.clearDelegate();
}
}
*/
// Test the method availableCharsets() with charset supported by multiple providers.
/* j2objc: b/139491456
public void testAvailableCharsets_NormalProvider() throws Exception {
SettableCharsetProvider.setDelegate(new MockCharsetProvider());
try {
assertTrue(Charset.availableCharsets().containsKey("mockCharset00"));
assertTrue(Charset.availableCharsets().containsKey("MOCKCharset00"));
assertTrue(Charset.availableCharsets().get("mockCharset00") instanceof MockCharset);
assertTrue(Charset.availableCharsets().get("MOCKCharset00") instanceof MockCharset);
assertFalse(Charset.availableCharsets().containsKey("mockCharset01"));
assertFalse(Charset.availableCharsets().containsKey("mockCharset02"));
assertTrue(Charset.availableCharsets().get("mockCharset10") == charset2);
assertTrue(Charset.availableCharsets().get("MOCKCharset10") == charset2);
assertFalse(Charset.availableCharsets().containsKey("mockCharset11"));
assertFalse(Charset.availableCharsets().containsKey("mockCharset12"));
assertTrue(Charset.availableCharsets().containsKey("mockCharset10"));
assertTrue(Charset.availableCharsets().containsKey("MOCKCharset10"));
assertTrue(Charset.availableCharsets().get("mockCharset10") == charset2);
assertFalse(Charset.availableCharsets().containsKey("mockCharset11"));
assertFalse(Charset.availableCharsets().containsKey("mockCharset12"));
} finally {
SettableCharsetProvider.clearDelegate();
}
}
*/
// Test the method forName(String) when the charset provider supports a
// built-in charset.
public void testForName_DuplicateWithBuiltInCharset() throws Exception {
SettableCharsetProvider.setDelegate(new MockCharsetProviderASCII());
try {
assertFalse(Charset.forName("us-ascii") instanceof MockCharset);
assertFalse(Charset.availableCharsets().get("us-ascii") instanceof MockCharset);
} finally {
SettableCharsetProvider.clearDelegate();
}
}
// Fails on Android with a StackOverflowException.
public void testForName_withProviderWithRecursiveCall() throws Exception {
SettableCharsetProvider.setDelegate(new MockCharsetProviderWithRecursiveCall());
try {
Charset.forName("poop");
fail();
} catch (UnsupportedCharsetException expected) {
} finally {
SettableCharsetProvider.clearDelegate();
}
}
public static class MockCharsetProviderWithRecursiveCall extends CharsetProvider {
@Override
public Iterator<Charset> charsets() {
return null;
}
@Override
public Charset charsetForName(String charsetName) {
if (Charset.isSupported(charsetName)) {
return Charset.forName(charsetName);
}
return null;
}
}
public static class MockCharsetProvider extends CharsetProvider {
public Charset charsetForName(String charsetName) {
if ("MockCharset00".equalsIgnoreCase(charsetName) ||
"MockCharset01".equalsIgnoreCase(charsetName) ||
"MockCharset02".equalsIgnoreCase(charsetName)) {
return charset1;
} else if ("MockCharset10".equalsIgnoreCase(charsetName) ||
"MockCharset11".equalsIgnoreCase(charsetName) ||
"MockCharset12".equalsIgnoreCase(charsetName)) {
return charset2;
}
return null;
}
public Iterator charsets() {
Vector v = new Vector();
v.add(charset1);
v.add(charset2);
return v.iterator();
}
}
// Another mock charset provider attempting to provide the built-in charset "ascii" again.
public static class MockCharsetProviderASCII extends CharsetProvider {
public Charset charsetForName(String charsetName) {
if ("US-ASCII".equalsIgnoreCase(charsetName) || "ASCII".equalsIgnoreCase(charsetName)) {
return new MockCharset("US-ASCII", new String[] { "ASCII" });
}
return null;
}
public Iterator charsets() {
Vector v = new Vector();
v.add(new MockCharset("US-ASCII", new String[] { "ASCII" }));
return v.iterator();
}
}
}
|
googleapis/google-cloud-java | 36,079 | java-aiplatform/google-cloud-aiplatform/src/main/java/com/google/cloud/aiplatform/v1beta1/MetadataServiceSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.aiplatform.v1beta1;
import static com.google.cloud.aiplatform.v1beta1.MetadataServiceClient.ListArtifactsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.MetadataServiceClient.ListContextsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.MetadataServiceClient.ListExecutionsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.MetadataServiceClient.ListLocationsPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.MetadataServiceClient.ListMetadataSchemasPagedResponse;
import static com.google.cloud.aiplatform.v1beta1.MetadataServiceClient.ListMetadataStoresPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.ClientSettings;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.cloud.aiplatform.v1beta1.stub.MetadataServiceStubSettings;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link MetadataServiceClient}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (aiplatform.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getMetadataStore:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* MetadataServiceSettings.Builder metadataServiceSettingsBuilder =
* MetadataServiceSettings.newBuilder();
* metadataServiceSettingsBuilder
* .getMetadataStoreSettings()
* .setRetrySettings(
* metadataServiceSettingsBuilder
* .getMetadataStoreSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* MetadataServiceSettings metadataServiceSettings = metadataServiceSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for createMetadataStore:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* MetadataServiceSettings.Builder metadataServiceSettingsBuilder =
* MetadataServiceSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
 *     OperationTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* metadataServiceSettingsBuilder
 *     .createMetadataStoreOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@BetaApi
@Generated("by gapic-generator-java")
public class MetadataServiceSettings extends ClientSettings<MetadataServiceSettings> {
  // NOTE: generated by gapic-generator-java — do not hand-edit logic. This wrapper holds no
  // state of its own; every per-RPC accessor below casts the generic stub settings held by
  // ClientSettings to MetadataServiceStubSettings and delegates to the corresponding accessor.
  /** Returns the object with the settings used for calls to createMetadataStore. */
  public UnaryCallSettings<CreateMetadataStoreRequest, Operation> createMetadataStoreSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).createMetadataStoreSettings();
  }
  /** Returns the object with the settings used for calls to createMetadataStore. */
  public OperationCallSettings<
          CreateMetadataStoreRequest, MetadataStore, CreateMetadataStoreOperationMetadata>
      createMetadataStoreOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).createMetadataStoreOperationSettings();
  }
  /** Returns the object with the settings used for calls to getMetadataStore. */
  public UnaryCallSettings<GetMetadataStoreRequest, MetadataStore> getMetadataStoreSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).getMetadataStoreSettings();
  }
  /** Returns the object with the settings used for calls to listMetadataStores. */
  public PagedCallSettings<
          ListMetadataStoresRequest, ListMetadataStoresResponse, ListMetadataStoresPagedResponse>
      listMetadataStoresSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).listMetadataStoresSettings();
  }
  /** Returns the object with the settings used for calls to deleteMetadataStore. */
  public UnaryCallSettings<DeleteMetadataStoreRequest, Operation> deleteMetadataStoreSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteMetadataStoreSettings();
  }
  /** Returns the object with the settings used for calls to deleteMetadataStore. */
  public OperationCallSettings<
          DeleteMetadataStoreRequest, Empty, DeleteMetadataStoreOperationMetadata>
      deleteMetadataStoreOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteMetadataStoreOperationSettings();
  }
  /** Returns the object with the settings used for calls to createArtifact. */
  public UnaryCallSettings<CreateArtifactRequest, Artifact> createArtifactSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).createArtifactSettings();
  }
  /** Returns the object with the settings used for calls to getArtifact. */
  public UnaryCallSettings<GetArtifactRequest, Artifact> getArtifactSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).getArtifactSettings();
  }
  /** Returns the object with the settings used for calls to listArtifacts. */
  public PagedCallSettings<ListArtifactsRequest, ListArtifactsResponse, ListArtifactsPagedResponse>
      listArtifactsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).listArtifactsSettings();
  }
  /** Returns the object with the settings used for calls to updateArtifact. */
  public UnaryCallSettings<UpdateArtifactRequest, Artifact> updateArtifactSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).updateArtifactSettings();
  }
  /** Returns the object with the settings used for calls to deleteArtifact. */
  public UnaryCallSettings<DeleteArtifactRequest, Operation> deleteArtifactSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteArtifactSettings();
  }
  /** Returns the object with the settings used for calls to deleteArtifact. */
  public OperationCallSettings<DeleteArtifactRequest, Empty, DeleteOperationMetadata>
      deleteArtifactOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteArtifactOperationSettings();
  }
  /** Returns the object with the settings used for calls to purgeArtifacts. */
  public UnaryCallSettings<PurgeArtifactsRequest, Operation> purgeArtifactsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).purgeArtifactsSettings();
  }
  /** Returns the object with the settings used for calls to purgeArtifacts. */
  public OperationCallSettings<
          PurgeArtifactsRequest, PurgeArtifactsResponse, PurgeArtifactsMetadata>
      purgeArtifactsOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).purgeArtifactsOperationSettings();
  }
  /** Returns the object with the settings used for calls to createContext. */
  public UnaryCallSettings<CreateContextRequest, Context> createContextSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).createContextSettings();
  }
  /** Returns the object with the settings used for calls to getContext. */
  public UnaryCallSettings<GetContextRequest, Context> getContextSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).getContextSettings();
  }
  /** Returns the object with the settings used for calls to listContexts. */
  public PagedCallSettings<ListContextsRequest, ListContextsResponse, ListContextsPagedResponse>
      listContextsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).listContextsSettings();
  }
  /** Returns the object with the settings used for calls to updateContext. */
  public UnaryCallSettings<UpdateContextRequest, Context> updateContextSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).updateContextSettings();
  }
  /** Returns the object with the settings used for calls to deleteContext. */
  public UnaryCallSettings<DeleteContextRequest, Operation> deleteContextSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteContextSettings();
  }
  /** Returns the object with the settings used for calls to deleteContext. */
  public OperationCallSettings<DeleteContextRequest, Empty, DeleteOperationMetadata>
      deleteContextOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteContextOperationSettings();
  }
  /** Returns the object with the settings used for calls to purgeContexts. */
  public UnaryCallSettings<PurgeContextsRequest, Operation> purgeContextsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).purgeContextsSettings();
  }
  /** Returns the object with the settings used for calls to purgeContexts. */
  public OperationCallSettings<PurgeContextsRequest, PurgeContextsResponse, PurgeContextsMetadata>
      purgeContextsOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).purgeContextsOperationSettings();
  }
  /** Returns the object with the settings used for calls to addContextArtifactsAndExecutions. */
  public UnaryCallSettings<
          AddContextArtifactsAndExecutionsRequest, AddContextArtifactsAndExecutionsResponse>
      addContextArtifactsAndExecutionsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings())
        .addContextArtifactsAndExecutionsSettings();
  }
  /** Returns the object with the settings used for calls to addContextChildren. */
  public UnaryCallSettings<AddContextChildrenRequest, AddContextChildrenResponse>
      addContextChildrenSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).addContextChildrenSettings();
  }
  /** Returns the object with the settings used for calls to removeContextChildren. */
  public UnaryCallSettings<RemoveContextChildrenRequest, RemoveContextChildrenResponse>
      removeContextChildrenSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).removeContextChildrenSettings();
  }
  /** Returns the object with the settings used for calls to queryContextLineageSubgraph. */
  public UnaryCallSettings<QueryContextLineageSubgraphRequest, LineageSubgraph>
      queryContextLineageSubgraphSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).queryContextLineageSubgraphSettings();
  }
  /** Returns the object with the settings used for calls to createExecution. */
  public UnaryCallSettings<CreateExecutionRequest, Execution> createExecutionSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).createExecutionSettings();
  }
  /** Returns the object with the settings used for calls to getExecution. */
  public UnaryCallSettings<GetExecutionRequest, Execution> getExecutionSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).getExecutionSettings();
  }
  /** Returns the object with the settings used for calls to listExecutions. */
  public PagedCallSettings<
          ListExecutionsRequest, ListExecutionsResponse, ListExecutionsPagedResponse>
      listExecutionsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).listExecutionsSettings();
  }
  /** Returns the object with the settings used for calls to updateExecution. */
  public UnaryCallSettings<UpdateExecutionRequest, Execution> updateExecutionSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).updateExecutionSettings();
  }
  /** Returns the object with the settings used for calls to deleteExecution. */
  public UnaryCallSettings<DeleteExecutionRequest, Operation> deleteExecutionSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteExecutionSettings();
  }
  /** Returns the object with the settings used for calls to deleteExecution. */
  public OperationCallSettings<DeleteExecutionRequest, Empty, DeleteOperationMetadata>
      deleteExecutionOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).deleteExecutionOperationSettings();
  }
  /** Returns the object with the settings used for calls to purgeExecutions. */
  public UnaryCallSettings<PurgeExecutionsRequest, Operation> purgeExecutionsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).purgeExecutionsSettings();
  }
  /** Returns the object with the settings used for calls to purgeExecutions. */
  public OperationCallSettings<
          PurgeExecutionsRequest, PurgeExecutionsResponse, PurgeExecutionsMetadata>
      purgeExecutionsOperationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).purgeExecutionsOperationSettings();
  }
  /** Returns the object with the settings used for calls to addExecutionEvents. */
  public UnaryCallSettings<AddExecutionEventsRequest, AddExecutionEventsResponse>
      addExecutionEventsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).addExecutionEventsSettings();
  }
  /** Returns the object with the settings used for calls to queryExecutionInputsAndOutputs. */
  public UnaryCallSettings<QueryExecutionInputsAndOutputsRequest, LineageSubgraph>
      queryExecutionInputsAndOutputsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings())
        .queryExecutionInputsAndOutputsSettings();
  }
  /** Returns the object with the settings used for calls to createMetadataSchema. */
  public UnaryCallSettings<CreateMetadataSchemaRequest, MetadataSchema>
      createMetadataSchemaSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).createMetadataSchemaSettings();
  }
  /** Returns the object with the settings used for calls to getMetadataSchema. */
  public UnaryCallSettings<GetMetadataSchemaRequest, MetadataSchema> getMetadataSchemaSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).getMetadataSchemaSettings();
  }
  /** Returns the object with the settings used for calls to listMetadataSchemas. */
  public PagedCallSettings<
          ListMetadataSchemasRequest, ListMetadataSchemasResponse, ListMetadataSchemasPagedResponse>
      listMetadataSchemasSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).listMetadataSchemasSettings();
  }
  /** Returns the object with the settings used for calls to queryArtifactLineageSubgraph. */
  public UnaryCallSettings<QueryArtifactLineageSubgraphRequest, LineageSubgraph>
      queryArtifactLineageSubgraphSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).queryArtifactLineageSubgraphSettings();
  }
  /** Returns the object with the settings used for calls to listLocations. */
  public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
      listLocationsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).listLocationsSettings();
  }
  /** Returns the object with the settings used for calls to getLocation. */
  public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).getLocationSettings();
  }
  /** Returns the object with the settings used for calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyRequest, Policy> setIamPolicySettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).setIamPolicySettings();
  }
  /** Returns the object with the settings used for calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyRequest, Policy> getIamPolicySettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).getIamPolicySettings();
  }
  /** Returns the object with the settings used for calls to testIamPermissions. */
  public UnaryCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsSettings() {
    return ((MetadataServiceStubSettings) getStubSettings()).testIamPermissionsSettings();
  }
  /**
   * Constructs an instance of MetadataServiceSettings wrapping the given stub settings.
   *
   * @throws IOException if the settings cannot be built (e.g. default credentials fail to load)
   */
  public static final MetadataServiceSettings create(MetadataServiceStubSettings stub)
      throws IOException {
    return new MetadataServiceSettings.Builder(stub.toBuilder()).build();
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return MetadataServiceStubSettings.defaultExecutorProviderBuilder();
  }
  /** Returns the default service endpoint. */
  public static String getDefaultEndpoint() {
    return MetadataServiceStubSettings.getDefaultEndpoint();
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return MetadataServiceStubSettings.getDefaultServiceScopes();
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return MetadataServiceStubSettings.defaultCredentialsProviderBuilder();
  }
  /** Returns a builder for the default ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return MetadataServiceStubSettings.defaultGrpcTransportProviderBuilder();
  }
  /** Returns the default TransportChannelProvider for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return MetadataServiceStubSettings.defaultTransportChannelProvider();
  }
  /** Returns a builder for the default ApiClientHeaderProvider for this service. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return MetadataServiceStubSettings.defaultApiClientHeaderProviderBuilder();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /** Constructs the immutable settings object from the given builder's state. */
  protected MetadataServiceSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
  }
  /**
   * Builder for MetadataServiceSettings.
   *
   * <p>Like the outer class, every per-RPC accessor delegates to the underlying
   * MetadataServiceStubSettings.Builder; this wrapper adds no state of its own.
   */
  public static class Builder extends ClientSettings.Builder<MetadataServiceSettings, Builder> {
    protected Builder() throws IOException {
      // Delegates to the ClientContext overload with a null context, which makes the stub
      // settings builder fall back to all service defaults.
      this(((ClientContext) null));
    }
    protected Builder(ClientContext clientContext) {
      super(MetadataServiceStubSettings.newBuilder(clientContext));
    }
    protected Builder(MetadataServiceSettings settings) {
      super(settings.getStubSettings().toBuilder());
    }
    protected Builder(MetadataServiceStubSettings.Builder stubSettings) {
      super(stubSettings);
    }
    /** Creates a builder preconfigured with the service's default stub settings. */
    private static Builder createDefault() {
      return new Builder(MetadataServiceStubSettings.newBuilder());
    }
    /** Returns the underlying stub settings builder that all accessors below delegate to. */
    public MetadataServiceStubSettings.Builder getStubSettingsBuilder() {
      return ((MetadataServiceStubSettings.Builder) getStubSettings());
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(
          getStubSettingsBuilder().unaryMethodSettingsBuilders(), settingsUpdater);
      return this;
    }
    /** Returns the builder for the settings used for calls to createMetadataStore. */
    public UnaryCallSettings.Builder<CreateMetadataStoreRequest, Operation>
        createMetadataStoreSettings() {
      return getStubSettingsBuilder().createMetadataStoreSettings();
    }
    /** Returns the builder for the settings used for calls to createMetadataStore. */
    public OperationCallSettings.Builder<
            CreateMetadataStoreRequest, MetadataStore, CreateMetadataStoreOperationMetadata>
        createMetadataStoreOperationSettings() {
      return getStubSettingsBuilder().createMetadataStoreOperationSettings();
    }
    /** Returns the builder for the settings used for calls to getMetadataStore. */
    public UnaryCallSettings.Builder<GetMetadataStoreRequest, MetadataStore>
        getMetadataStoreSettings() {
      return getStubSettingsBuilder().getMetadataStoreSettings();
    }
    /** Returns the builder for the settings used for calls to listMetadataStores. */
    public PagedCallSettings.Builder<
            ListMetadataStoresRequest, ListMetadataStoresResponse, ListMetadataStoresPagedResponse>
        listMetadataStoresSettings() {
      return getStubSettingsBuilder().listMetadataStoresSettings();
    }
    /** Returns the builder for the settings used for calls to deleteMetadataStore. */
    public UnaryCallSettings.Builder<DeleteMetadataStoreRequest, Operation>
        deleteMetadataStoreSettings() {
      return getStubSettingsBuilder().deleteMetadataStoreSettings();
    }
    /** Returns the builder for the settings used for calls to deleteMetadataStore. */
    public OperationCallSettings.Builder<
            DeleteMetadataStoreRequest, Empty, DeleteMetadataStoreOperationMetadata>
        deleteMetadataStoreOperationSettings() {
      return getStubSettingsBuilder().deleteMetadataStoreOperationSettings();
    }
    /** Returns the builder for the settings used for calls to createArtifact. */
    public UnaryCallSettings.Builder<CreateArtifactRequest, Artifact> createArtifactSettings() {
      return getStubSettingsBuilder().createArtifactSettings();
    }
    /** Returns the builder for the settings used for calls to getArtifact. */
    public UnaryCallSettings.Builder<GetArtifactRequest, Artifact> getArtifactSettings() {
      return getStubSettingsBuilder().getArtifactSettings();
    }
    /** Returns the builder for the settings used for calls to listArtifacts. */
    public PagedCallSettings.Builder<
            ListArtifactsRequest, ListArtifactsResponse, ListArtifactsPagedResponse>
        listArtifactsSettings() {
      return getStubSettingsBuilder().listArtifactsSettings();
    }
    /** Returns the builder for the settings used for calls to updateArtifact. */
    public UnaryCallSettings.Builder<UpdateArtifactRequest, Artifact> updateArtifactSettings() {
      return getStubSettingsBuilder().updateArtifactSettings();
    }
    /** Returns the builder for the settings used for calls to deleteArtifact. */
    public UnaryCallSettings.Builder<DeleteArtifactRequest, Operation> deleteArtifactSettings() {
      return getStubSettingsBuilder().deleteArtifactSettings();
    }
    /** Returns the builder for the settings used for calls to deleteArtifact. */
    public OperationCallSettings.Builder<DeleteArtifactRequest, Empty, DeleteOperationMetadata>
        deleteArtifactOperationSettings() {
      return getStubSettingsBuilder().deleteArtifactOperationSettings();
    }
    /** Returns the builder for the settings used for calls to purgeArtifacts. */
    public UnaryCallSettings.Builder<PurgeArtifactsRequest, Operation> purgeArtifactsSettings() {
      return getStubSettingsBuilder().purgeArtifactsSettings();
    }
    /** Returns the builder for the settings used for calls to purgeArtifacts. */
    public OperationCallSettings.Builder<
            PurgeArtifactsRequest, PurgeArtifactsResponse, PurgeArtifactsMetadata>
        purgeArtifactsOperationSettings() {
      return getStubSettingsBuilder().purgeArtifactsOperationSettings();
    }
    /** Returns the builder for the settings used for calls to createContext. */
    public UnaryCallSettings.Builder<CreateContextRequest, Context> createContextSettings() {
      return getStubSettingsBuilder().createContextSettings();
    }
    /** Returns the builder for the settings used for calls to getContext. */
    public UnaryCallSettings.Builder<GetContextRequest, Context> getContextSettings() {
      return getStubSettingsBuilder().getContextSettings();
    }
    /** Returns the builder for the settings used for calls to listContexts. */
    public PagedCallSettings.Builder<
            ListContextsRequest, ListContextsResponse, ListContextsPagedResponse>
        listContextsSettings() {
      return getStubSettingsBuilder().listContextsSettings();
    }
    /** Returns the builder for the settings used for calls to updateContext. */
    public UnaryCallSettings.Builder<UpdateContextRequest, Context> updateContextSettings() {
      return getStubSettingsBuilder().updateContextSettings();
    }
    /** Returns the builder for the settings used for calls to deleteContext. */
    public UnaryCallSettings.Builder<DeleteContextRequest, Operation> deleteContextSettings() {
      return getStubSettingsBuilder().deleteContextSettings();
    }
    /** Returns the builder for the settings used for calls to deleteContext. */
    public OperationCallSettings.Builder<DeleteContextRequest, Empty, DeleteOperationMetadata>
        deleteContextOperationSettings() {
      return getStubSettingsBuilder().deleteContextOperationSettings();
    }
    /** Returns the builder for the settings used for calls to purgeContexts. */
    public UnaryCallSettings.Builder<PurgeContextsRequest, Operation> purgeContextsSettings() {
      return getStubSettingsBuilder().purgeContextsSettings();
    }
    /** Returns the builder for the settings used for calls to purgeContexts. */
    public OperationCallSettings.Builder<
            PurgeContextsRequest, PurgeContextsResponse, PurgeContextsMetadata>
        purgeContextsOperationSettings() {
      return getStubSettingsBuilder().purgeContextsOperationSettings();
    }
    /** Returns the builder for the settings used for calls to addContextArtifactsAndExecutions. */
    public UnaryCallSettings.Builder<
            AddContextArtifactsAndExecutionsRequest, AddContextArtifactsAndExecutionsResponse>
        addContextArtifactsAndExecutionsSettings() {
      return getStubSettingsBuilder().addContextArtifactsAndExecutionsSettings();
    }
    /** Returns the builder for the settings used for calls to addContextChildren. */
    public UnaryCallSettings.Builder<AddContextChildrenRequest, AddContextChildrenResponse>
        addContextChildrenSettings() {
      return getStubSettingsBuilder().addContextChildrenSettings();
    }
    /** Returns the builder for the settings used for calls to removeContextChildren. */
    public UnaryCallSettings.Builder<RemoveContextChildrenRequest, RemoveContextChildrenResponse>
        removeContextChildrenSettings() {
      return getStubSettingsBuilder().removeContextChildrenSettings();
    }
    /** Returns the builder for the settings used for calls to queryContextLineageSubgraph. */
    public UnaryCallSettings.Builder<QueryContextLineageSubgraphRequest, LineageSubgraph>
        queryContextLineageSubgraphSettings() {
      return getStubSettingsBuilder().queryContextLineageSubgraphSettings();
    }
    /** Returns the builder for the settings used for calls to createExecution. */
    public UnaryCallSettings.Builder<CreateExecutionRequest, Execution> createExecutionSettings() {
      return getStubSettingsBuilder().createExecutionSettings();
    }
    /** Returns the builder for the settings used for calls to getExecution. */
    public UnaryCallSettings.Builder<GetExecutionRequest, Execution> getExecutionSettings() {
      return getStubSettingsBuilder().getExecutionSettings();
    }
    /** Returns the builder for the settings used for calls to listExecutions. */
    public PagedCallSettings.Builder<
            ListExecutionsRequest, ListExecutionsResponse, ListExecutionsPagedResponse>
        listExecutionsSettings() {
      return getStubSettingsBuilder().listExecutionsSettings();
    }
    /** Returns the builder for the settings used for calls to updateExecution. */
    public UnaryCallSettings.Builder<UpdateExecutionRequest, Execution> updateExecutionSettings() {
      return getStubSettingsBuilder().updateExecutionSettings();
    }
    /** Returns the builder for the settings used for calls to deleteExecution. */
    public UnaryCallSettings.Builder<DeleteExecutionRequest, Operation> deleteExecutionSettings() {
      return getStubSettingsBuilder().deleteExecutionSettings();
    }
    /** Returns the builder for the settings used for calls to deleteExecution. */
    public OperationCallSettings.Builder<DeleteExecutionRequest, Empty, DeleteOperationMetadata>
        deleteExecutionOperationSettings() {
      return getStubSettingsBuilder().deleteExecutionOperationSettings();
    }
    /** Returns the builder for the settings used for calls to purgeExecutions. */
    public UnaryCallSettings.Builder<PurgeExecutionsRequest, Operation> purgeExecutionsSettings() {
      return getStubSettingsBuilder().purgeExecutionsSettings();
    }
    /** Returns the builder for the settings used for calls to purgeExecutions. */
    public OperationCallSettings.Builder<
            PurgeExecutionsRequest, PurgeExecutionsResponse, PurgeExecutionsMetadata>
        purgeExecutionsOperationSettings() {
      return getStubSettingsBuilder().purgeExecutionsOperationSettings();
    }
    /** Returns the builder for the settings used for calls to addExecutionEvents. */
    public UnaryCallSettings.Builder<AddExecutionEventsRequest, AddExecutionEventsResponse>
        addExecutionEventsSettings() {
      return getStubSettingsBuilder().addExecutionEventsSettings();
    }
    /** Returns the builder for the settings used for calls to queryExecutionInputsAndOutputs. */
    public UnaryCallSettings.Builder<QueryExecutionInputsAndOutputsRequest, LineageSubgraph>
        queryExecutionInputsAndOutputsSettings() {
      return getStubSettingsBuilder().queryExecutionInputsAndOutputsSettings();
    }
    /** Returns the builder for the settings used for calls to createMetadataSchema. */
    public UnaryCallSettings.Builder<CreateMetadataSchemaRequest, MetadataSchema>
        createMetadataSchemaSettings() {
      return getStubSettingsBuilder().createMetadataSchemaSettings();
    }
    /** Returns the builder for the settings used for calls to getMetadataSchema. */
    public UnaryCallSettings.Builder<GetMetadataSchemaRequest, MetadataSchema>
        getMetadataSchemaSettings() {
      return getStubSettingsBuilder().getMetadataSchemaSettings();
    }
    /** Returns the builder for the settings used for calls to listMetadataSchemas. */
    public PagedCallSettings.Builder<
            ListMetadataSchemasRequest,
            ListMetadataSchemasResponse,
            ListMetadataSchemasPagedResponse>
        listMetadataSchemasSettings() {
      return getStubSettingsBuilder().listMetadataSchemasSettings();
    }
    /** Returns the builder for the settings used for calls to queryArtifactLineageSubgraph. */
    public UnaryCallSettings.Builder<QueryArtifactLineageSubgraphRequest, LineageSubgraph>
        queryArtifactLineageSubgraphSettings() {
      return getStubSettingsBuilder().queryArtifactLineageSubgraphSettings();
    }
    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return getStubSettingsBuilder().listLocationsSettings();
    }
    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getStubSettingsBuilder().getLocationSettings();
    }
    /** Returns the builder for the settings used for calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyRequest, Policy> setIamPolicySettings() {
      return getStubSettingsBuilder().setIamPolicySettings();
    }
    /** Returns the builder for the settings used for calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyRequest, Policy> getIamPolicySettings() {
      return getStubSettingsBuilder().getIamPolicySettings();
    }
    /** Returns the builder for the settings used for calls to testIamPermissions. */
    public UnaryCallSettings.Builder<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsSettings() {
      return getStubSettingsBuilder().testIamPermissionsSettings();
    }
    @Override
    public MetadataServiceSettings build() throws IOException {
      return new MetadataServiceSettings(this);
    }
  }
}
|
googleapis/google-cloud-java | 35,802 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ToolCallValidResults.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Results for tool call valid metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ToolCallValidResults}
*/
public final class ToolCallValidResults extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ToolCallValidResults)
    ToolCallValidResultsOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ToolCallValidResults.newBuilder() to construct.
  private ToolCallValidResults(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: the repeated field starts as an immutable empty list.
  private ToolCallValidResults() {
    toolCallValidMetricValues_ = java.util.Collections.emptyList();
  }

  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ToolCallValidResults();
  }

  // Descriptor for this message type, looked up from the generated file-level proto class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_ToolCallValidResults_descriptor;
  }

  // Maps proto field descriptors to the generated Java accessors (used by reflection APIs).
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_ToolCallValidResults_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.ToolCallValidResults.class,
            com.google.cloud.aiplatform.v1.ToolCallValidResults.Builder.class);
  }

  public static final int TOOL_CALL_VALID_METRIC_VALUES_FIELD_NUMBER = 1;

  // Backing list for repeated field 1; always immutable once owned by a message instance.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.aiplatform.v1.ToolCallValidMetricValue>
      toolCallValidMetricValues_;

  /**
   *
   *
   * <pre>
   * Output only. Tool call valid metric values.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.aiplatform.v1.ToolCallValidMetricValue>
      getToolCallValidMetricValuesList() {
    return toolCallValidMetricValues_;
  }

  /**
   *
   *
   * <pre>
   * Output only. Tool call valid metric values.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.aiplatform.v1.ToolCallValidMetricValueOrBuilder>
      getToolCallValidMetricValuesOrBuilderList() {
    return toolCallValidMetricValues_;
  }

  /**
   *
   *
   * <pre>
   * Output only. Tool call valid metric values.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public int getToolCallValidMetricValuesCount() {
    return toolCallValidMetricValues_.size();
  }

  /**
   *
   *
   * <pre>
   * Output only. Tool call valid metric values.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ToolCallValidMetricValue getToolCallValidMetricValues(
      int index) {
    return toolCallValidMetricValues_.get(index);
  }

  /**
   *
   *
   * <pre>
   * Output only. Tool call valid metric values.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ToolCallValidMetricValueOrBuilder
      getToolCallValidMetricValuesOrBuilder(int index) {
    return toolCallValidMetricValues_.get(index);
  }

  // Tri-state initialization cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so initialization always succeeds (and is memoized).
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Writes each repeated message under field number 1, then any unknown fields preserved
  // from parsing, so round-tripping does not lose data from newer schema versions.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < toolCallValidMetricValues_.size(); i++) {
      output.writeMessage(1, toolCallValidMetricValues_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize; -1 = not yet computed) the serialized byte size.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < toolCallValidMetricValues_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              1, toolCallValidMetricValues_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Value equality over the repeated field and the unknown-field set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.ToolCallValidResults)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.ToolCallValidResults other =
        (com.google.cloud.aiplatform.v1.ToolCallValidResults) obj;
    if (!getToolCallValidMetricValuesList().equals(other.getToolCallValidMetricValuesList()))
      return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); memoized in memoizedHashCode (0 = not yet computed).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getToolCallValidMetricValuesCount() > 0) {
      hash = (37 * hash) + TOOL_CALL_VALID_METRIC_VALUES_FIELD_NUMBER;
      hash = (53 * hash) + getToolCallValidMetricValuesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points covering the common input representations.
  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length before the message payload.
  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.aiplatform.v1.ToolCallValidResults prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance yields a fresh empty Builder; any other instance seeds the Builder
  // with its current field values.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Results for tool call valid metric.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.ToolCallValidResults}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ToolCallValidResults)
      com.google.cloud.aiplatform.v1.ToolCallValidResultsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_ToolCallValidResults_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_ToolCallValidResults_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.ToolCallValidResults.class,
              com.google.cloud.aiplatform.v1.ToolCallValidResults.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1.ToolCallValidResults.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets the builder to the default (empty) state. Bit 0x00000001 of bitField0_ tracks
    // whether this builder owns a mutable copy of the repeated list.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (toolCallValidMetricValuesBuilder_ == null) {
        toolCallValidMetricValues_ = java.util.Collections.emptyList();
      } else {
        toolCallValidMetricValues_ = null;
        toolCallValidMetricValuesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_ToolCallValidResults_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ToolCallValidResults getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.ToolCallValidResults.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ToolCallValidResults build() {
      com.google.cloud.aiplatform.v1.ToolCallValidResults result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.ToolCallValidResults buildPartial() {
      com.google.cloud.aiplatform.v1.ToolCallValidResults result =
          new com.google.cloud.aiplatform.v1.ToolCallValidResults(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field into the result. When no nested field builder is in use,
    // the list is frozen (made unmodifiable) and ownership moves to the message, so the
    // builder must re-copy before any further mutation (copy-on-write).
    private void buildPartialRepeatedFields(
        com.google.cloud.aiplatform.v1.ToolCallValidResults result) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          toolCallValidMetricValues_ =
              java.util.Collections.unmodifiableList(toolCallValidMetricValues_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.toolCallValidMetricValues_ = toolCallValidMetricValues_;
      } else {
        result.toolCallValidMetricValues_ = toolCallValidMetricValuesBuilder_.build();
      }
    }

    // No singular fields exist on this message; emitted by the generator for symmetry.
    private void buildPartial0(com.google.cloud.aiplatform.v1.ToolCallValidResults result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.ToolCallValidResults) {
        return mergeFrom((com.google.cloud.aiplatform.v1.ToolCallValidResults) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges another message of the same type: repeated entries are appended. When this
    // builder's list is still empty it adopts the other's (immutable) list directly and
    // defers copying until a mutation occurs.
    public Builder mergeFrom(com.google.cloud.aiplatform.v1.ToolCallValidResults other) {
      if (other == com.google.cloud.aiplatform.v1.ToolCallValidResults.getDefaultInstance())
        return this;
      if (toolCallValidMetricValuesBuilder_ == null) {
        if (!other.toolCallValidMetricValues_.isEmpty()) {
          if (toolCallValidMetricValues_.isEmpty()) {
            toolCallValidMetricValues_ = other.toolCallValidMetricValues_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureToolCallValidMetricValuesIsMutable();
            toolCallValidMetricValues_.addAll(other.toolCallValidMetricValues_);
          }
          onChanged();
        }
      } else {
        if (!other.toolCallValidMetricValues_.isEmpty()) {
          if (toolCallValidMetricValuesBuilder_.isEmpty()) {
            toolCallValidMetricValuesBuilder_.dispose();
            toolCallValidMetricValuesBuilder_ = null;
            toolCallValidMetricValues_ = other.toolCallValidMetricValues_;
            bitField0_ = (bitField0_ & ~0x00000001);
            toolCallValidMetricValuesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getToolCallValidMetricValuesFieldBuilder()
                    : null;
          } else {
            toolCallValidMetricValuesBuilder_.addAllMessages(other.toolCallValidMetricValues_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Streaming merge: tag 10 is field 1 with wire type 2 (length-delimited message);
    // tag 0 means end of stream; anything else is preserved as an unknown field.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.aiplatform.v1.ToolCallValidMetricValue m =
                    input.readMessage(
                        com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.parser(),
                        extensionRegistry);
                if (toolCallValidMetricValuesBuilder_ == null) {
                  ensureToolCallValidMetricValuesIsMutable();
                  toolCallValidMetricValues_.add(m);
                } else {
                  toolCallValidMetricValuesBuilder_.addMessage(m);
                }
                break;
              } // case 10
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private java.util.List<com.google.cloud.aiplatform.v1.ToolCallValidMetricValue>
        toolCallValidMetricValues_ = java.util.Collections.emptyList();

    // Copy-on-write guard: if the builder does not yet own a mutable list (bit 0x00000001
    // clear), copy the current contents into a fresh ArrayList and mark ownership.
    private void ensureToolCallValidMetricValuesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        toolCallValidMetricValues_ =
            new java.util.ArrayList<com.google.cloud.aiplatform.v1.ToolCallValidMetricValue>(
                toolCallValidMetricValues_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested field builder; once created, it (not the plain list) is the
    // source of truth for the repeated field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ToolCallValidMetricValue,
            com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder,
            com.google.cloud.aiplatform.v1.ToolCallValidMetricValueOrBuilder>
        toolCallValidMetricValuesBuilder_;

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.ToolCallValidMetricValue>
        getToolCallValidMetricValuesList() {
      if (toolCallValidMetricValuesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(toolCallValidMetricValues_);
      } else {
        return toolCallValidMetricValuesBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public int getToolCallValidMetricValuesCount() {
      if (toolCallValidMetricValuesBuilder_ == null) {
        return toolCallValidMetricValues_.size();
      } else {
        return toolCallValidMetricValuesBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.ToolCallValidMetricValue getToolCallValidMetricValues(
        int index) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        return toolCallValidMetricValues_.get(index);
      } else {
        return toolCallValidMetricValuesBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setToolCallValidMetricValues(
        int index, com.google.cloud.aiplatform.v1.ToolCallValidMetricValue value) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureToolCallValidMetricValuesIsMutable();
        toolCallValidMetricValues_.set(index, value);
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder setToolCallValidMetricValues(
        int index,
        com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder builderForValue) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        ensureToolCallValidMetricValuesIsMutable();
        toolCallValidMetricValues_.set(index, builderForValue.build());
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder addToolCallValidMetricValues(
        com.google.cloud.aiplatform.v1.ToolCallValidMetricValue value) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureToolCallValidMetricValuesIsMutable();
        toolCallValidMetricValues_.add(value);
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder addToolCallValidMetricValues(
        int index, com.google.cloud.aiplatform.v1.ToolCallValidMetricValue value) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureToolCallValidMetricValuesIsMutable();
        toolCallValidMetricValues_.add(index, value);
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder addToolCallValidMetricValues(
        com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder builderForValue) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        ensureToolCallValidMetricValuesIsMutable();
        toolCallValidMetricValues_.add(builderForValue.build());
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder addToolCallValidMetricValues(
        int index,
        com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder builderForValue) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        ensureToolCallValidMetricValuesIsMutable();
        toolCallValidMetricValues_.add(index, builderForValue.build());
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder addAllToolCallValidMetricValues(
        java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.ToolCallValidMetricValue>
            values) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        ensureToolCallValidMetricValuesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, toolCallValidMetricValues_);
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder clearToolCallValidMetricValues() {
      if (toolCallValidMetricValuesBuilder_ == null) {
        toolCallValidMetricValues_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public Builder removeToolCallValidMetricValues(int index) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        ensureToolCallValidMetricValuesIsMutable();
        toolCallValidMetricValues_.remove(index);
        onChanged();
      } else {
        toolCallValidMetricValuesBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder
        getToolCallValidMetricValuesBuilder(int index) {
      return getToolCallValidMetricValuesFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.ToolCallValidMetricValueOrBuilder
        getToolCallValidMetricValuesOrBuilder(int index) {
      if (toolCallValidMetricValuesBuilder_ == null) {
        return toolCallValidMetricValues_.get(index);
      } else {
        return toolCallValidMetricValuesBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public java.util.List<
            ? extends com.google.cloud.aiplatform.v1.ToolCallValidMetricValueOrBuilder>
        getToolCallValidMetricValuesOrBuilderList() {
      if (toolCallValidMetricValuesBuilder_ != null) {
        return toolCallValidMetricValuesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(toolCallValidMetricValues_);
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder
        addToolCallValidMetricValuesBuilder() {
      return getToolCallValidMetricValuesFieldBuilder()
          .addBuilder(com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder
        addToolCallValidMetricValuesBuilder(int index) {
      return getToolCallValidMetricValuesFieldBuilder()
          .addBuilder(
              index, com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * Output only. Tool call valid metric values.
     * </pre>
     *
     * <code>
     * repeated .google.cloud.aiplatform.v1.ToolCallValidMetricValue tool_call_valid_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     */
    public java.util.List<com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder>
        getToolCallValidMetricValuesBuilderList() {
      return getToolCallValidMetricValuesFieldBuilder().getBuilderList();
    }

    // Lazily creates the RepeatedFieldBuilderV3, handing it the current list plus the
    // current ownership flag; afterwards the plain list reference is nulled out because
    // the field builder becomes the single source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.aiplatform.v1.ToolCallValidMetricValue,
            com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder,
            com.google.cloud.aiplatform.v1.ToolCallValidMetricValueOrBuilder>
        getToolCallValidMetricValuesFieldBuilder() {
      if (toolCallValidMetricValuesBuilder_ == null) {
        toolCallValidMetricValuesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.aiplatform.v1.ToolCallValidMetricValue,
                com.google.cloud.aiplatform.v1.ToolCallValidMetricValue.Builder,
                com.google.cloud.aiplatform.v1.ToolCallValidMetricValueOrBuilder>(
                toolCallValidMetricValues_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        toolCallValidMetricValues_ = null;
      }
      return toolCallValidMetricValuesBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ToolCallValidResults)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ToolCallValidResults)
  private static final com.google.cloud.aiplatform.v1.ToolCallValidResults DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ToolCallValidResults();
  }

  public static com.google.cloud.aiplatform.v1.ToolCallValidResults getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser instance: parses partially and attaches the partial message to the
  // exception so callers can inspect what was decoded before the failure.
  private static final com.google.protobuf.Parser<ToolCallValidResults> PARSER =
      new com.google.protobuf.AbstractParser<ToolCallValidResults>() {
        @java.lang.Override
        public ToolCallValidResults parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ToolCallValidResults> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ToolCallValidResults> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ToolCallValidResults getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,802 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ToolNameMatchResults.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Results for tool name match metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ToolNameMatchResults}
*/
public final class ToolNameMatchResults extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ToolNameMatchResults)
ToolNameMatchResultsOrBuilder {
private static final long serialVersionUID = 0L;
// Use ToolNameMatchResults.newBuilder() to construct.
  // Use ToolNameMatchResults.newBuilder() to construct.
  // Private: instances are only created via the Builder / parser machinery.
  private ToolNameMatchResults(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: the repeated field starts as an immutable empty list.
  private ToolNameMatchResults() {
    toolNameMatchMetricValues_ = java.util.Collections.emptyList();
  }
  // Reflection hook used by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ToolNameMatchResults();
  }
  // Descriptor for this message type, looked up from the generated file-level proto class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_ToolNameMatchResults_descriptor;
  }
  // Maps proto field descriptors to the generated Java accessors (used by reflection APIs).
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_ToolNameMatchResults_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.ToolNameMatchResults.class,
            com.google.cloud.aiplatform.v1.ToolNameMatchResults.Builder.class);
  }
  public static final int TOOL_NAME_MATCH_METRIC_VALUES_FIELD_NUMBER = 1;

  // Backing list for repeated field 1; immutable once owned by a message instance.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue>
      toolNameMatchMetricValues_;

  /**
   *
   *
   * <pre>
   * Output only. Tool name match metric values.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue>
      getToolNameMatchMetricValuesList() {
    return toolNameMatchMetricValues_;
  }
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.aiplatform.v1.ToolNameMatchMetricValueOrBuilder>
getToolNameMatchMetricValuesOrBuilderList() {
return toolNameMatchMetricValues_;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public int getToolNameMatchMetricValuesCount() {
return toolNameMatchMetricValues_.size();
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue getToolNameMatchMetricValues(
int index) {
return toolNameMatchMetricValues_.get(index);
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolNameMatchMetricValueOrBuilder
getToolNameMatchMetricValuesOrBuilder(int index) {
return toolNameMatchMetricValues_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < toolNameMatchMetricValues_.size(); i++) {
output.writeMessage(1, toolNameMatchMetricValues_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < toolNameMatchMetricValues_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
1, toolNameMatchMetricValues_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ToolNameMatchResults)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ToolNameMatchResults other =
(com.google.cloud.aiplatform.v1.ToolNameMatchResults) obj;
if (!getToolNameMatchMetricValuesList().equals(other.getToolNameMatchMetricValuesList()))
return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getToolNameMatchMetricValuesCount() > 0) {
hash = (37 * hash) + TOOL_NAME_MATCH_METRIC_VALUES_FIELD_NUMBER;
hash = (53 * hash) + getToolNameMatchMetricValuesList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.ToolNameMatchResults prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Results for tool name match metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ToolNameMatchResults}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ToolNameMatchResults)
com.google.cloud.aiplatform.v1.ToolNameMatchResultsOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolNameMatchResults_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolNameMatchResults_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ToolNameMatchResults.class,
com.google.cloud.aiplatform.v1.ToolNameMatchResults.Builder.class);
}
// Construct using com.google.cloud.aiplatform.v1.ToolNameMatchResults.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (toolNameMatchMetricValuesBuilder_ == null) {
toolNameMatchMetricValues_ = java.util.Collections.emptyList();
} else {
toolNameMatchMetricValues_ = null;
toolNameMatchMetricValuesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.aiplatform.v1.EvaluationServiceProto
.internal_static_google_cloud_aiplatform_v1_ToolNameMatchResults_descriptor;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolNameMatchResults getDefaultInstanceForType() {
return com.google.cloud.aiplatform.v1.ToolNameMatchResults.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolNameMatchResults build() {
com.google.cloud.aiplatform.v1.ToolNameMatchResults result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolNameMatchResults buildPartial() {
com.google.cloud.aiplatform.v1.ToolNameMatchResults result =
new com.google.cloud.aiplatform.v1.ToolNameMatchResults(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.aiplatform.v1.ToolNameMatchResults result) {
if (toolNameMatchMetricValuesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
toolNameMatchMetricValues_ =
java.util.Collections.unmodifiableList(toolNameMatchMetricValues_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.toolNameMatchMetricValues_ = toolNameMatchMetricValues_;
} else {
result.toolNameMatchMetricValues_ = toolNameMatchMetricValuesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.aiplatform.v1.ToolNameMatchResults result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1.ToolNameMatchResults) {
return mergeFrom((com.google.cloud.aiplatform.v1.ToolNameMatchResults) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.aiplatform.v1.ToolNameMatchResults other) {
if (other == com.google.cloud.aiplatform.v1.ToolNameMatchResults.getDefaultInstance())
return this;
if (toolNameMatchMetricValuesBuilder_ == null) {
if (!other.toolNameMatchMetricValues_.isEmpty()) {
if (toolNameMatchMetricValues_.isEmpty()) {
toolNameMatchMetricValues_ = other.toolNameMatchMetricValues_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.addAll(other.toolNameMatchMetricValues_);
}
onChanged();
}
} else {
if (!other.toolNameMatchMetricValues_.isEmpty()) {
if (toolNameMatchMetricValuesBuilder_.isEmpty()) {
toolNameMatchMetricValuesBuilder_.dispose();
toolNameMatchMetricValuesBuilder_ = null;
toolNameMatchMetricValues_ = other.toolNameMatchMetricValues_;
bitField0_ = (bitField0_ & ~0x00000001);
toolNameMatchMetricValuesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getToolNameMatchMetricValuesFieldBuilder()
: null;
} else {
toolNameMatchMetricValuesBuilder_.addAllMessages(other.toolNameMatchMetricValues_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue m =
input.readMessage(
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.parser(),
extensionRegistry);
if (toolNameMatchMetricValuesBuilder_ == null) {
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.add(m);
} else {
toolNameMatchMetricValuesBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue>
toolNameMatchMetricValues_ = java.util.Collections.emptyList();
private void ensureToolNameMatchMetricValuesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
toolNameMatchMetricValues_ =
new java.util.ArrayList<com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue>(
toolNameMatchMetricValues_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValueOrBuilder>
toolNameMatchMetricValuesBuilder_;
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue>
getToolNameMatchMetricValuesList() {
if (toolNameMatchMetricValuesBuilder_ == null) {
return java.util.Collections.unmodifiableList(toolNameMatchMetricValues_);
} else {
return toolNameMatchMetricValuesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public int getToolNameMatchMetricValuesCount() {
if (toolNameMatchMetricValuesBuilder_ == null) {
return toolNameMatchMetricValues_.size();
} else {
return toolNameMatchMetricValuesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue getToolNameMatchMetricValues(
int index) {
if (toolNameMatchMetricValuesBuilder_ == null) {
return toolNameMatchMetricValues_.get(index);
} else {
return toolNameMatchMetricValuesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setToolNameMatchMetricValues(
int index, com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue value) {
if (toolNameMatchMetricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.set(index, value);
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder setToolNameMatchMetricValues(
int index,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder builderForValue) {
if (toolNameMatchMetricValuesBuilder_ == null) {
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.set(index, builderForValue.build());
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolNameMatchMetricValues(
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue value) {
if (toolNameMatchMetricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.add(value);
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolNameMatchMetricValues(
int index, com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue value) {
if (toolNameMatchMetricValuesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.add(index, value);
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolNameMatchMetricValues(
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder builderForValue) {
if (toolNameMatchMetricValuesBuilder_ == null) {
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.add(builderForValue.build());
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addToolNameMatchMetricValues(
int index,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder builderForValue) {
if (toolNameMatchMetricValuesBuilder_ == null) {
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.add(index, builderForValue.build());
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder addAllToolNameMatchMetricValues(
java.lang.Iterable<? extends com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue>
values) {
if (toolNameMatchMetricValuesBuilder_ == null) {
ensureToolNameMatchMetricValuesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, toolNameMatchMetricValues_);
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder clearToolNameMatchMetricValues() {
if (toolNameMatchMetricValuesBuilder_ == null) {
toolNameMatchMetricValues_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public Builder removeToolNameMatchMetricValues(int index) {
if (toolNameMatchMetricValuesBuilder_ == null) {
ensureToolNameMatchMetricValuesIsMutable();
toolNameMatchMetricValues_.remove(index);
onChanged();
} else {
toolNameMatchMetricValuesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder
getToolNameMatchMetricValuesBuilder(int index) {
return getToolNameMatchMetricValuesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolNameMatchMetricValueOrBuilder
getToolNameMatchMetricValuesOrBuilder(int index) {
if (toolNameMatchMetricValuesBuilder_ == null) {
return toolNameMatchMetricValues_.get(index);
} else {
return toolNameMatchMetricValuesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<
? extends com.google.cloud.aiplatform.v1.ToolNameMatchMetricValueOrBuilder>
getToolNameMatchMetricValuesOrBuilderList() {
if (toolNameMatchMetricValuesBuilder_ != null) {
return toolNameMatchMetricValuesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(toolNameMatchMetricValues_);
}
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder
addToolNameMatchMetricValuesBuilder() {
return getToolNameMatchMetricValuesFieldBuilder()
.addBuilder(com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder
addToolNameMatchMetricValuesBuilder(int index) {
return getToolNameMatchMetricValuesFieldBuilder()
.addBuilder(
index, com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. Tool name match metric values.
* </pre>
*
* <code>
* repeated .google.cloud.aiplatform.v1.ToolNameMatchMetricValue tool_name_match_metric_values = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*/
public java.util.List<com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder>
getToolNameMatchMetricValuesBuilderList() {
return getToolNameMatchMetricValuesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValueOrBuilder>
getToolNameMatchMetricValuesFieldBuilder() {
if (toolNameMatchMetricValuesBuilder_ == null) {
toolNameMatchMetricValuesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValue.Builder,
com.google.cloud.aiplatform.v1.ToolNameMatchMetricValueOrBuilder>(
toolNameMatchMetricValues_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
toolNameMatchMetricValues_ = null;
}
return toolNameMatchMetricValuesBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ToolNameMatchResults)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ToolNameMatchResults)
private static final com.google.cloud.aiplatform.v1.ToolNameMatchResults DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ToolNameMatchResults();
}
public static com.google.cloud.aiplatform.v1.ToolNameMatchResults getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ToolNameMatchResults> PARSER =
new com.google.protobuf.AbstractParser<ToolNameMatchResults>() {
@java.lang.Override
public ToolNameMatchResults parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ToolNameMatchResults> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ToolNameMatchResults> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ToolNameMatchResults getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,918 | java-discoveryengine/grpc-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/UserEventServiceGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.discoveryengine.v1alpha;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for ingesting end user actions on a website to Discovery Engine API.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/discoveryengine/v1alpha/user_event_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class UserEventServiceGrpc {
private UserEventServiceGrpc() {}
public static final java.lang.String SERVICE_NAME =
"google.cloud.discoveryengine.v1alpha.UserEventService";
// Static method descriptors that strictly reflect the proto.
  // Descriptor for the unary WriteUserEvent RPC; lazily created on first access (see accessor
  // below) and cached in this volatile field.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest,
          com.google.cloud.discoveryengine.v1alpha.UserEvent>
      getWriteUserEventMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "WriteUserEvent",
      requestType = com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest.class,
      responseType = com.google.cloud.discoveryengine.v1alpha.UserEvent.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest,
          com.google.cloud.discoveryengine.v1alpha.UserEvent>
      getWriteUserEventMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest,
            com.google.cloud.discoveryengine.v1alpha.UserEvent>
        getWriteUserEventMethod;
    // Double-checked locking on the class object: build the descriptor at most once.
    if ((getWriteUserEventMethod = UserEventServiceGrpc.getWriteUserEventMethod) == null) {
      synchronized (UserEventServiceGrpc.class) {
        if ((getWriteUserEventMethod = UserEventServiceGrpc.getWriteUserEventMethod) == null) {
          UserEventServiceGrpc.getWriteUserEventMethod =
              getWriteUserEventMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest,
                          com.google.cloud.discoveryengine.v1alpha.UserEvent>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "WriteUserEvent"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1alpha.UserEvent
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new UserEventServiceMethodDescriptorSupplier("WriteUserEvent"))
                      .build();
        }
      }
    }
    return getWriteUserEventMethod;
  }
  // Descriptor for the unary CollectUserEvent RPC (returns a raw HttpBody); lazily created on
  // first access and cached in this volatile field.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest, com.google.api.HttpBody>
      getCollectUserEventMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CollectUserEvent",
      requestType = com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest.class,
      responseType = com.google.api.HttpBody.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest, com.google.api.HttpBody>
      getCollectUserEventMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest,
            com.google.api.HttpBody>
        getCollectUserEventMethod;
    // Double-checked locking on the class object: build the descriptor at most once.
    if ((getCollectUserEventMethod = UserEventServiceGrpc.getCollectUserEventMethod) == null) {
      synchronized (UserEventServiceGrpc.class) {
        if ((getCollectUserEventMethod = UserEventServiceGrpc.getCollectUserEventMethod) == null) {
          UserEventServiceGrpc.getCollectUserEventMethod =
              getCollectUserEventMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest,
                          com.google.api.HttpBody>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CollectUserEvent"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.api.HttpBody.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new UserEventServiceMethodDescriptorSupplier("CollectUserEvent"))
                      .build();
        }
      }
    }
    return getCollectUserEventMethod;
  }
// Lazily-initialized descriptor for the PurgeUserEvents unary RPC, which
// returns a long-running google.longrunning.Operation.
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest,
        com.google.longrunning.Operation>
    getPurgeUserEventsMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "PurgeUserEvents",
    requestType = com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest.class,
    responseType = com.google.longrunning.Operation.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest,
        com.google.longrunning.Operation>
    getPurgeUserEventsMethod() {
  // Double-checked locking: volatile read first, synchronize only on first use.
  io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest,
          com.google.longrunning.Operation>
      getPurgeUserEventsMethod;
  if ((getPurgeUserEventsMethod = UserEventServiceGrpc.getPurgeUserEventsMethod) == null) {
    synchronized (UserEventServiceGrpc.class) {
      if ((getPurgeUserEventsMethod = UserEventServiceGrpc.getPurgeUserEventsMethod) == null) {
        UserEventServiceGrpc.getPurgeUserEventsMethod =
            getPurgeUserEventsMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest,
                        com.google.longrunning.Operation>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "PurgeUserEvents"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest
                                .getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.longrunning.Operation.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new UserEventServiceMethodDescriptorSupplier("PurgeUserEvents"))
                    .build();
      }
    }
  }
  return getPurgeUserEventsMethod;
}
// Lazily-initialized descriptor for the ImportUserEvents unary RPC, which
// returns a long-running google.longrunning.Operation.
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest,
        com.google.longrunning.Operation>
    getImportUserEventsMethod;

@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "ImportUserEvents",
    requestType = com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest.class,
    responseType = com.google.longrunning.Operation.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest,
        com.google.longrunning.Operation>
    getImportUserEventsMethod() {
  // Double-checked locking: volatile read first, synchronize only on first use.
  io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest,
          com.google.longrunning.Operation>
      getImportUserEventsMethod;
  if ((getImportUserEventsMethod = UserEventServiceGrpc.getImportUserEventsMethod) == null) {
    synchronized (UserEventServiceGrpc.class) {
      if ((getImportUserEventsMethod = UserEventServiceGrpc.getImportUserEventsMethod) == null) {
        UserEventServiceGrpc.getImportUserEventsMethod =
            getImportUserEventsMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest,
                        com.google.longrunning.Operation>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ImportUserEvents"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest
                                .getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.longrunning.Operation.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new UserEventServiceMethodDescriptorSupplier("ImportUserEvents"))
                    .build();
      }
    }
  }
  return getImportUserEventsMethod;
}
/** Creates a new async stub that supports all call types for the service. */
public static UserEventServiceStub newStub(io.grpc.Channel channel) {
  // StubFactory is a single-method interface, so the stub's (outer-class
  // accessible) constructor satisfies it directly via a constructor reference.
  io.grpc.stub.AbstractStub.StubFactory<UserEventServiceStub> factory =
      UserEventServiceStub::new;
  return UserEventServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service. */
public static UserEventServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
  // Constructor reference replaces the anonymous StubFactory implementation;
  // both forward (channel, callOptions) to the private stub constructor.
  io.grpc.stub.AbstractStub.StubFactory<UserEventServiceBlockingV2Stub> factory =
      UserEventServiceBlockingV2Stub::new;
  return UserEventServiceBlockingV2Stub.newStub(factory, channel);
}
/**
 * Creates a new blocking-style stub that supports unary and streaming output calls on the service.
 */
public static UserEventServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
  // Constructor reference replaces the anonymous StubFactory implementation;
  // both forward (channel, callOptions) to the private stub constructor.
  io.grpc.stub.AbstractStub.StubFactory<UserEventServiceBlockingStub> factory =
      UserEventServiceBlockingStub::new;
  return UserEventServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service. */
public static UserEventServiceFutureStub newFutureStub(io.grpc.Channel channel) {
  // Constructor reference replaces the anonymous StubFactory implementation;
  // both forward (channel, callOptions) to the private stub constructor.
  io.grpc.stub.AbstractStub.StubFactory<UserEventServiceFutureStub> factory =
      UserEventServiceFutureStub::new;
  return UserEventServiceFutureStub.newStub(factory, channel);
}
/**
 * Async server-side contract for UserEventService. Every default implementation
 * responds with UNIMPLEMENTED until a server overrides the method.
 *
 * <pre>
 * Service for ingesting end user actions on a website to Discovery Engine API.
 * </pre>
 */
public interface AsyncService {

  /**
   *
   *
   * <pre>
   * Writes a single user event.
   * </pre>
   */
  default void writeUserEvent(
      com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1alpha.UserEvent>
          responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getWriteUserEventMethod(), responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event from the browser. This uses a GET request to
   * due to browser restriction of POST-ing to a third-party domain.
   * This method is used only by the Discovery Engine API JavaScript pixel and
   * Google Tag Manager. Users should not call this method directly.
   * </pre>
   */
  default void collectUserEvent(
      com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest request,
      io.grpc.stub.StreamObserver<com.google.api.HttpBody> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getCollectUserEventMethod(), responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Deletes permanently all user events specified by the filter provided.
   * Depending on the number of events specified by the filter, this operation
   * could take hours or days to complete. To test a filter, use the list
   * command first.
   * </pre>
   */
  default void purgeUserEvents(
      com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest request,
      io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getPurgeUserEventsMethod(), responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Bulk import of user events. Request processing might be
   * synchronous. Events that already exist are skipped.
   * Use this method for backfilling historical user events.
   * Operation.response is of type ImportResponse. Note that it is
   * possible for a subset of the items to be successfully inserted.
   * Operation.metadata is of type ImportMetadata.
   * </pre>
   */
  default void importUserEvents(
      com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest request,
      io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getImportUserEventsMethod(), responseObserver);
  }
}
/**
 * Base class for the server implementation of the service UserEventService.
 *
 * <pre>
 * Service for ingesting end user actions on a website to Discovery Engine API.
 * </pre>
 */
public abstract static class UserEventServiceImplBase
    implements io.grpc.BindableService, AsyncService {

  @java.lang.Override
  public final io.grpc.ServerServiceDefinition bindService() {
    // Delegates to the static bindService so stand-alone AsyncService
    // implementations and subclasses of this base share one binding path.
    return UserEventServiceGrpc.bindService(this);
  }
}
/**
 * A stub to allow clients to do asynchronous rpc calls to service UserEventService.
 *
 * <pre>
 * Service for ingesting end user actions on a website to Discovery Engine API.
 * </pre>
 */
public static final class UserEventServiceStub
    extends io.grpc.stub.AbstractAsyncStub<UserEventServiceStub> {

  private UserEventServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected UserEventServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Invoked by the stub machinery when deriving a stub with new CallOptions.
    return new UserEventServiceStub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event.
   * </pre>
   */
  public void writeUserEvent(
      com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1alpha.UserEvent>
          responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getWriteUserEventMethod(), getCallOptions()),
        request,
        responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event from the browser. This uses a GET request to
   * due to browser restriction of POST-ing to a third-party domain.
   * This method is used only by the Discovery Engine API JavaScript pixel and
   * Google Tag Manager. Users should not call this method directly.
   * </pre>
   */
  public void collectUserEvent(
      com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest request,
      io.grpc.stub.StreamObserver<com.google.api.HttpBody> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getCollectUserEventMethod(), getCallOptions()),
        request,
        responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Deletes permanently all user events specified by the filter provided.
   * Depending on the number of events specified by the filter, this operation
   * could take hours or days to complete. To test a filter, use the list
   * command first.
   * </pre>
   */
  public void purgeUserEvents(
      com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest request,
      io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getPurgeUserEventsMethod(), getCallOptions()),
        request,
        responseObserver);
  }

  /**
   *
   *
   * <pre>
   * Bulk import of user events. Request processing might be
   * synchronous. Events that already exist are skipped.
   * Use this method for backfilling historical user events.
   * Operation.response is of type ImportResponse. Note that it is
   * possible for a subset of the items to be successfully inserted.
   * Operation.metadata is of type ImportMetadata.
   * </pre>
   */
  public void importUserEvents(
      com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest request,
      io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getImportUserEventsMethod(), getCallOptions()),
        request,
        responseObserver);
  }
}
/**
 * A stub to allow clients to do synchronous rpc calls to service UserEventService.
 *
 * <pre>
 * Service for ingesting end user actions on a website to Discovery Engine API.
 * </pre>
 */
public static final class UserEventServiceBlockingV2Stub
    extends io.grpc.stub.AbstractBlockingStub<UserEventServiceBlockingV2Stub> {

  private UserEventServiceBlockingV2Stub(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected UserEventServiceBlockingV2Stub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Invoked by the stub machinery when deriving a stub with new CallOptions.
    return new UserEventServiceBlockingV2Stub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event.
   * </pre>
   */
  public com.google.cloud.discoveryengine.v1alpha.UserEvent writeUserEvent(
      com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getWriteUserEventMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event from the browser. This uses a GET request to
   * due to browser restriction of POST-ing to a third-party domain.
   * This method is used only by the Discovery Engine API JavaScript pixel and
   * Google Tag Manager. Users should not call this method directly.
   * </pre>
   */
  public com.google.api.HttpBody collectUserEvent(
      com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getCollectUserEventMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Deletes permanently all user events specified by the filter provided.
   * Depending on the number of events specified by the filter, this operation
   * could take hours or days to complete. To test a filter, use the list
   * command first.
   * </pre>
   */
  public com.google.longrunning.Operation purgeUserEvents(
      com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getPurgeUserEventsMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Bulk import of user events. Request processing might be
   * synchronous. Events that already exist are skipped.
   * Use this method for backfilling historical user events.
   * Operation.response is of type ImportResponse. Note that it is
   * possible for a subset of the items to be successfully inserted.
   * Operation.metadata is of type ImportMetadata.
   * </pre>
   */
  public com.google.longrunning.Operation importUserEvents(
      com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getImportUserEventsMethod(), getCallOptions(), request);
  }
}
/**
 * A stub to allow clients to do limited synchronous rpc calls to service UserEventService.
 *
 * <pre>
 * Service for ingesting end user actions on a website to Discovery Engine API.
 * </pre>
 */
public static final class UserEventServiceBlockingStub
    extends io.grpc.stub.AbstractBlockingStub<UserEventServiceBlockingStub> {

  private UserEventServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected UserEventServiceBlockingStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Invoked by the stub machinery when deriving a stub with new CallOptions.
    return new UserEventServiceBlockingStub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event.
   * </pre>
   */
  public com.google.cloud.discoveryengine.v1alpha.UserEvent writeUserEvent(
      com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getWriteUserEventMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event from the browser. This uses a GET request to
   * due to browser restriction of POST-ing to a third-party domain.
   * This method is used only by the Discovery Engine API JavaScript pixel and
   * Google Tag Manager. Users should not call this method directly.
   * </pre>
   */
  public com.google.api.HttpBody collectUserEvent(
      com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getCollectUserEventMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Deletes permanently all user events specified by the filter provided.
   * Depending on the number of events specified by the filter, this operation
   * could take hours or days to complete. To test a filter, use the list
   * command first.
   * </pre>
   */
  public com.google.longrunning.Operation purgeUserEvents(
      com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getPurgeUserEventsMethod(), getCallOptions(), request);
  }

  /**
   *
   *
   * <pre>
   * Bulk import of user events. Request processing might be
   * synchronous. Events that already exist are skipped.
   * Use this method for backfilling historical user events.
   * Operation.response is of type ImportResponse. Note that it is
   * possible for a subset of the items to be successfully inserted.
   * Operation.metadata is of type ImportMetadata.
   * </pre>
   */
  public com.google.longrunning.Operation importUserEvents(
      com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getImportUserEventsMethod(), getCallOptions(), request);
  }
}
/**
 * A stub to allow clients to do ListenableFuture-style rpc calls to service UserEventService.
 *
 * <pre>
 * Service for ingesting end user actions on a website to Discovery Engine API.
 * </pre>
 */
public static final class UserEventServiceFutureStub
    extends io.grpc.stub.AbstractFutureStub<UserEventServiceFutureStub> {

  private UserEventServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }

  @java.lang.Override
  protected UserEventServiceFutureStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    // Invoked by the stub machinery when deriving a stub with new CallOptions.
    return new UserEventServiceFutureStub(channel, callOptions);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<
          com.google.cloud.discoveryengine.v1alpha.UserEvent>
      writeUserEvent(com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getWriteUserEventMethod(), getCallOptions()), request);
  }

  /**
   *
   *
   * <pre>
   * Writes a single user event from the browser. This uses a GET request to
   * due to browser restriction of POST-ing to a third-party domain.
   * This method is used only by the Discovery Engine API JavaScript pixel and
   * Google Tag Manager. Users should not call this method directly.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.api.HttpBody>
      collectUserEvent(com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getCollectUserEventMethod(), getCallOptions()), request);
  }

  /**
   *
   *
   * <pre>
   * Deletes permanently all user events specified by the filter provided.
   * Depending on the number of events specified by the filter, this operation
   * could take hours or days to complete. To test a filter, use the list
   * command first.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
      purgeUserEvents(com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getPurgeUserEventsMethod(), getCallOptions()), request);
  }

  /**
   *
   *
   * <pre>
   * Bulk import of user events. Request processing might be
   * synchronous. Events that already exist are skipped.
   * Use this method for backfilling historical user events.
   * Operation.response is of type ImportResponse. Note that it is
   * possible for a subset of the items to be successfully inserted.
   * Operation.metadata is of type ImportMetadata.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
      importUserEvents(com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getImportUserEventsMethod(), getCallOptions()), request);
  }
}
// Dispatch ids used by MethodHandlers.invoke to route an incoming call to the
// matching AsyncService method.
private static final int METHODID_WRITE_USER_EVENT = 0;
private static final int METHODID_COLLECT_USER_EVENT = 1;
private static final int METHODID_PURGE_USER_EVENTS = 2;
private static final int METHODID_IMPORT_USER_EVENTS = 3;
// Bridges the generic server-call handler interfaces to the typed AsyncService
// methods. One generic class serves all RPCs; methodId selects the target.
private static final class MethodHandlers<Req, Resp>
    implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
        io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
  private final AsyncService serviceImpl;
  private final int methodId;

  MethodHandlers(AsyncService serviceImpl, int methodId) {
    this.serviceImpl = serviceImpl;
    this.methodId = methodId;
  }

  @java.lang.Override
  @java.lang.SuppressWarnings("unchecked")
  public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
    // Unary dispatch: casts are safe because bindService pairs each methodId
    // with the matching request/response types.
    switch (methodId) {
      case METHODID_WRITE_USER_EVENT:
        serviceImpl.writeUserEvent(
            (com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest) request,
            (io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1alpha.UserEvent>)
                responseObserver);
        break;
      case METHODID_COLLECT_USER_EVENT:
        serviceImpl.collectUserEvent(
            (com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest) request,
            (io.grpc.stub.StreamObserver<com.google.api.HttpBody>) responseObserver);
        break;
      case METHODID_PURGE_USER_EVENTS:
        serviceImpl.purgeUserEvents(
            (com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest) request,
            (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
        break;
      case METHODID_IMPORT_USER_EVENTS:
        serviceImpl.importUserEvents(
            (com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest) request,
            (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
        break;
      default:
        // An unknown methodId is a code-generation bug, not a runtime condition.
        throw new AssertionError();
    }
  }

  @java.lang.Override
  @java.lang.SuppressWarnings("unchecked")
  public io.grpc.stub.StreamObserver<Req> invoke(
      io.grpc.stub.StreamObserver<Resp> responseObserver) {
    // This service declares no client- or bidi-streaming methods, so any call
    // through the streaming entry point is unexpected.
    switch (methodId) {
      default:
        throw new AssertionError();
    }
  }
}
// Builds the server-side service definition, registering a unary handler for
// each of the four RPCs against the given AsyncService implementation.
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
  return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
      .addMethod(
          getWriteUserEventMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.cloud.discoveryengine.v1alpha.WriteUserEventRequest,
                  com.google.cloud.discoveryengine.v1alpha.UserEvent>(
                  service, METHODID_WRITE_USER_EVENT)))
      .addMethod(
          getCollectUserEventMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.cloud.discoveryengine.v1alpha.CollectUserEventRequest,
                  com.google.api.HttpBody>(service, METHODID_COLLECT_USER_EVENT)))
      .addMethod(
          getPurgeUserEventsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.cloud.discoveryengine.v1alpha.PurgeUserEventsRequest,
                  com.google.longrunning.Operation>(service, METHODID_PURGE_USER_EVENTS)))
      .addMethod(
          getImportUserEventsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.cloud.discoveryengine.v1alpha.ImportUserEventsRequest,
                  com.google.longrunning.Operation>(service, METHODID_IMPORT_USER_EVENTS)))
      .build();
}
// Supplies proto file/service descriptors for reflection and debugging; shared
// by the file- and method-level suppliers below.
private abstract static class UserEventServiceBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
        io.grpc.protobuf.ProtoServiceDescriptorSupplier {
  UserEventServiceBaseDescriptorSupplier() {}

  @java.lang.Override
  public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
    return com.google.cloud.discoveryengine.v1alpha.UserEventServiceProto.getDescriptor();
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
    return getFileDescriptor().findServiceByName("UserEventService");
  }
}
// File-level schema descriptor supplier attached to the service descriptor.
private static final class UserEventServiceFileDescriptorSupplier
    extends UserEventServiceBaseDescriptorSupplier {
  UserEventServiceFileDescriptorSupplier() {}
}
// Method-level schema descriptor supplier attached to each MethodDescriptor;
// resolves the named method from the service's proto descriptor.
private static final class UserEventServiceMethodDescriptorSupplier
    extends UserEventServiceBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
  private final java.lang.String methodName;

  UserEventServiceMethodDescriptorSupplier(java.lang.String methodName) {
    this.methodName = methodName;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
    return getServiceDescriptor().findMethodByName(methodName);
  }
}
// Aggregated service descriptor, lazily built once with double-checked locking.
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

public static io.grpc.ServiceDescriptor getServiceDescriptor() {
  io.grpc.ServiceDescriptor result = serviceDescriptor;
  if (result == null) {
    synchronized (UserEventServiceGrpc.class) {
      // Re-check under the lock in case another thread initialized it first.
      result = serviceDescriptor;
      if (result == null) {
        serviceDescriptor =
            result =
                io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                    .setSchemaDescriptor(new UserEventServiceFileDescriptorSupplier())
                    .addMethod(getWriteUserEventMethod())
                    .addMethod(getCollectUserEventMethod())
                    .addMethod(getPurgeUserEventsMethod())
                    .addMethod(getImportUserEventsMethod())
                    .build();
      }
    }
  }
  return result;
}
}
|
apache/iceberg | 35,669 | spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestStructuredStreamingRead3.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iceberg.spark.source;
import static org.apache.iceberg.expressions.Expressions.ref;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.io.File;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;
import org.apache.iceberg.BaseTable;
import org.apache.iceberg.DataFile;
import org.apache.iceberg.DataFiles;
import org.apache.iceberg.DataOperations;
import org.apache.iceberg.DeleteFile;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.Files;
import org.apache.iceberg.ParameterizedTestExtension;
import org.apache.iceberg.RewriteFiles;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Snapshot;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.data.FileHelpers;
import org.apache.iceberg.data.GenericRecord;
import org.apache.iceberg.data.Record;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
import org.apache.iceberg.relocated.com.google.common.collect.Lists;
import org.apache.iceberg.spark.CatalogTestBase;
import org.apache.iceberg.spark.SparkReadOptions;
import org.apache.spark.api.java.function.VoidFunction2;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Encoders;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.internal.SQLConf;
import org.apache.spark.sql.streaming.DataStreamWriter;
import org.apache.spark.sql.streaming.OutputMode;
import org.apache.spark.sql.streaming.StreamingQuery;
import org.apache.spark.sql.streaming.Trigger;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;
@ExtendWith(ParameterizedTestExtension.class)
public final class TestStructuredStreamingRead3 extends CatalogTestBase {
// Table under test; reloaded from the validation catalog in setupTable.
private Table table;
// Micro-batch counter reset to zero before each test (see setupTable);
// presumably incremented by streaming helpers defined later in this file —
// TODO(review): confirm against the helper implementations.
private final AtomicInteger microBatches = new AtomicInteger();
/**
 * Test data to be used by multiple writes; each write creates a snapshot and writes one of these
 * lists of records.
 */
private static final List<List<SimpleRecord>> TEST_DATA_MULTIPLE_SNAPSHOTS =
    Lists.newArrayList(
        Lists.newArrayList(
            new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three")),
        Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five")),
        Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven")));
/**
 * Test data to be used for multiple write batches; each batch in turn will have multiple
 * snapshots (one per inner list of records).
 */
private static final List<List<List<SimpleRecord>>> TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS =
    Lists.newArrayList(
        Lists.newArrayList(
            Lists.newArrayList(
                new SimpleRecord(1, "one"),
                new SimpleRecord(2, "two"),
                new SimpleRecord(3, "three")),
            Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five"))),
        Lists.newArrayList(
            Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven")),
            Lists.newArrayList(new SimpleRecord(8, "eight"), new SimpleRecord(9, "nine"))),
        Lists.newArrayList(
            Lists.newArrayList(
                new SimpleRecord(10, "ten"),
                new SimpleRecord(11, "eleven"),
                new SimpleRecord(12, "twelve")),
            Lists.newArrayList(
                new SimpleRecord(13, "thirteen"), new SimpleRecord(14, "fourteen")),
            Lists.newArrayList(
                new SimpleRecord(15, "fifteen"), new SimpleRecord(16, "sixteen"))));
/** One-time session setup: make per-write file counts deterministic. */
@BeforeAll
public static void setupSpark() {
  // disable AQE as tests assume that writes generate a particular number of files
  spark.conf().set(SQLConf.ADAPTIVE_EXECUTION_ENABLED().key(), "false");
}
/** Creates a fresh bucketed test table before each test and resets shared counters. */
@BeforeEach
public void setupTable() {
  // Partitioned by a 3-way bucket on id; manifest merging is enabled with a
  // low min-count so commits exercise the manifest-merge code path.
  sql(
      "CREATE TABLE %s "
          + "(id INT, data STRING) "
          + "USING iceberg "
          + "PARTITIONED BY (bucket(3, id)) "
          + "TBLPROPERTIES ('commit.manifest.min-count-to-merge'='3', 'commit.manifest-merge.enabled'='true')",
      tableName);
  this.table = validationCatalog.loadTable(tableIdent);
  // Reset the shared micro-batch counter so state doesn't leak across tests.
  microBatches.set(0);
}
/** Stops any streaming queries a test left running so they can't affect later tests. */
@AfterEach
public void stopStreams() throws TimeoutException {
  for (StreamingQuery query : spark.streams().active()) {
    query.stop();
  }
}
/** Drops the per-test table (if it still exists) after each test. */
@AfterEach
public void removeTables() {
  sql("DROP TABLE IF EXISTS %s", tableName);
}
@TestTemplate
public void testReadStreamOnIcebergTableWithMultipleSnapshots() throws Exception {
  // Commit the fixture data as a series of snapshots, then verify a fresh
  // stream surfaces every record across all of them (order is not guaranteed).
  List<List<SimpleRecord>> snapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
  appendDataAsMultipleSnapshots(snapshots);

  StreamingQuery stream = startStream();

  List<SimpleRecord> streamedRecords = rowsAvailable(stream);
  assertThat(streamedRecords).containsExactlyInAnyOrderElementsOf(Iterables.concat(snapshots));
}
  // Caps each micro batch at one data file, for both the default and AvailableNow triggers.
  // One batch has 2 rows - presumably two records land in the same bucket/file; confirm.
  @TestTemplate
  public void testReadStreamWithMaxFiles1() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L),
        Trigger.AvailableNow());
  }
  // Caps each micro batch at two data files, for both the default and AvailableNow triggers.
  @TestTemplate
  public void testReadStreamWithMaxFiles2() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L),
        Trigger.AvailableNow());
  }
  // max-rows is a soft limit: a batch may exceed it (the 2-row batch) but the stream never stalls.
  @TestTemplate
  public void testReadStreamWithMaxRows1() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L),
        Trigger.AvailableNow());
    // soft limit of 1 is being enforced, the stream is not blocked.
    StreamingQuery query = startStream(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "1");
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual)
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(TEST_DATA_MULTIPLE_SNAPSHOTS));
  }
  // Soft row limit of 2 per micro batch; all rows are still eventually delivered.
  @TestTemplate
  public void testReadStreamWithMaxRows2() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"),
        List.of(3L, 2L, 2L),
        Trigger.AvailableNow());
    StreamingQuery query =
        startStream(ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"));
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual)
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(TEST_DATA_MULTIPLE_SNAPSHOTS));
  }
  // Soft row limit of 4 per micro batch splits the 7 records into two batches.
  @TestTemplate
  public void testReadStreamWithMaxRows4() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"), List.of(4L, 3L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"),
        List.of(4L, 3L),
        Trigger.AvailableNow());
  }
  // When both file and row limits are set, the stricter file limit (1 file) governs batching.
  @TestTemplate
  public void testReadStreamWithCompositeReadLimit() throws Exception {
    appendDataAsMultipleSnapshots(TEST_DATA_MULTIPLE_SNAPSHOTS);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(
            SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1",
            SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
    assertMicroBatchRecordSizes(
        ImmutableMap.of(
            SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1",
            SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "2"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L),
        Trigger.AvailableNow());
  }
@TestTemplate
public void testAvailableNowStreamReadShouldNotHangOrReprocessData() throws Exception {
File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile();
File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint");
File output = temp.resolve("junit").toFile();
DataStreamWriter querySource =
spark
.readStream()
.format("iceberg")
.load(tableName)
.writeStream()
.option("checkpointLocation", writerCheckpoint.toString())
.format("parquet")
.trigger(Trigger.AvailableNow())
.option("path", output.getPath());
List<SimpleRecord> expected = Lists.newArrayList();
for (List<List<SimpleRecord>> expectedCheckpoint :
TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS) {
// New data was added while the stream was not running
appendDataAsMultipleSnapshots(expectedCheckpoint);
expected.addAll(Lists.newArrayList(Iterables.concat(Iterables.concat(expectedCheckpoint))));
try {
StreamingQuery query = querySource.start();
// Query should terminate on its own after processing all available data
assertThat(query.awaitTermination(60000)).isTrue();
// Check output
List<SimpleRecord> actual =
spark
.read()
.load(output.getPath())
.as(Encoders.bean(SimpleRecord.class))
.collectAsList();
assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
// Restarting immediately should not reprocess data
query = querySource.start();
assertThat(query.awaitTermination(60000)).isTrue();
assertThat(query.recentProgress().length).isEqualTo(1);
assertThat(query.recentProgress()[0].sources()[0].startOffset())
.isEqualTo(query.recentProgress()[0].sources()[0].endOffset());
} finally {
stopStreams();
}
}
}
@TestTemplate
public void testTriggerAvailableNowDoesNotProcessNewDataWhileRunning() throws Exception {
List<List<SimpleRecord>> expectedData = TEST_DATA_MULTIPLE_SNAPSHOTS;
appendDataAsMultipleSnapshots(expectedData);
long expectedRecordCount = expectedData.stream().mapToLong(List::size).sum();
table.refresh();
long expectedSnapshotId = table.currentSnapshot().snapshotId();
String sinkTable = "availablenow_sink";
StreamingQuery query =
spark
.readStream()
.option(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1")
.format("iceberg")
.load(tableName)
.writeStream()
.format("memory")
.queryName(sinkTable)
.trigger(Trigger.AvailableNow())
.start();
assertThat(query.isActive()).isTrue();
// Add new data while the stream is running
List<SimpleRecord> newDataDuringStreamSnap1 =
Lists.newArrayList(
new SimpleRecord(100, "hundred"),
new SimpleRecord(101, "hundred-one"),
new SimpleRecord(102, "hundred-two"));
List<SimpleRecord> newDataDuringStreamSnap2 =
Lists.newArrayList(
new SimpleRecord(200, "two-hundred"), new SimpleRecord(201, "two-hundred-one"));
appendData(newDataDuringStreamSnap1);
appendData(newDataDuringStreamSnap2);
// Query should terminate on its own after processing all available data till expectedSnapshotId
assertThat(query.awaitTermination(60000)).isTrue();
List<SimpleRecord> actualResults =
spark
.sql("SELECT * FROM " + sinkTable)
.as(Encoders.bean(SimpleRecord.class))
.collectAsList();
long endOffsetSnapshotId =
StreamingOffset.fromJson(query.lastProgress().sources()[0].endOffset()).snapshotId();
// Verify the stream processed only up to the snapshot present when started
assertThat(expectedSnapshotId).isEqualTo(endOffsetSnapshotId);
// Verify only the initial data was processed
assertThat(actualResults.size()).isEqualTo(expectedRecordCount);
assertThat(actualResults).containsExactlyInAnyOrderElementsOf(Iterables.concat(expectedData));
}
  // Data committed after the stream has started must be picked up by the running query.
  @TestTemplate
  public void testReadStreamOnIcebergThenAddData() throws Exception {
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    StreamingQuery query = startStream();
    appendDataAsMultipleSnapshots(expected);
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }
  // stream-from-timestamp set just past the current snapshot: pre-existing data is excluded,
  // later appends are streamed.
  @TestTemplate
  public void testReadingStreamFromTimestamp() throws Exception {
    List<SimpleRecord> dataBeforeTimestamp =
        Lists.newArrayList(
            new SimpleRecord(-2, "minustwo"),
            new SimpleRecord(-1, "minusone"),
            new SimpleRecord(0, "zero"));
    appendData(dataBeforeTimestamp);
    table.refresh();
    // +1 ms so the existing snapshot falls strictly before the stream start point
    long streamStartTimestamp = table.currentSnapshot().timestampMillis() + 1;
    StreamingQuery query =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(streamStartTimestamp));
    List<SimpleRecord> empty = rowsAvailable(query);
    assertThat(empty).isEmpty();
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }
  // A future stream-from-timestamp yields nothing until that time elapses; data appended
  // afterwards is streamed.
  // NOTE(review): method name contains a typo ("Timetsamp"); kept to avoid changing the
  // externally visible test identifier.
  @TestTemplate
  public void testReadingStreamFromFutureTimetsamp() throws Exception {
    long futureTimestamp = System.currentTimeMillis() + 10000;
    StreamingQuery query =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(futureTimestamp));
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).isEmpty();
    List<SimpleRecord> data =
        Lists.newArrayList(
            new SimpleRecord(-2, "minustwo"),
            new SimpleRecord(-1, "minusone"),
            new SimpleRecord(0, "zero"));
    // Perform several inserts that should not show up because the fromTimestamp has not elapsed
    IntStream.range(0, 3)
        .forEach(
            x -> {
              appendData(data);
              assertThat(rowsAvailable(query)).isEmpty();
            });
    waitUntilAfter(futureTimestamp);
    // Data appended after the timestamp should appear
    appendData(data);
    actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(data);
  }
  // Existing snapshots before a future stream-from-timestamp stay hidden; data appended once
  // the timestamp elapses is streamed.
  @TestTemplate
  public void testReadingStreamFromTimestampFutureWithExistingSnapshots() throws Exception {
    List<SimpleRecord> dataBeforeTimestamp =
        Lists.newArrayList(
            new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three"));
    appendData(dataBeforeTimestamp);
    long streamStartTimestamp = System.currentTimeMillis() + 2000;
    // Start the stream with a future timestamp after the current snapshot
    StreamingQuery query =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(streamStartTimestamp));
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).isEmpty();
    // Stream should contain data added after the timestamp elapses
    waitUntilAfter(streamStartTimestamp);
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }
  // Starting exactly at an existing snapshot's timestamp includes that snapshot's data.
  @TestTemplate
  public void testReadingStreamFromTimestampOfExistingSnapshot() throws Exception {
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    // Create an existing snapshot with some data
    appendData(expected.get(0));
    table.refresh();
    long firstSnapshotTime = table.currentSnapshot().timestampMillis();
    // Start stream giving the first Snapshot's time as the start point
    StreamingQuery stream =
        startStream(SparkReadOptions.STREAM_FROM_TIMESTAMP, Long.toString(firstSnapshotTime));
    // Append rest of expected data
    for (int i = 1; i < expected.size(); i++) {
      appendData(expected.get(i));
    }
    List<SimpleRecord> actual = rowsAvailable(stream);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }
  // Streaming from the timestamp of an expired snapshot skips it and reads the surviving ones.
  @TestTemplate
  public void testReadingStreamWithExpiredSnapshotFromTimestamp() throws TimeoutException {
    List<SimpleRecord> firstSnapshotRecordList = Lists.newArrayList(new SimpleRecord(1, "one"));
    List<SimpleRecord> secondSnapshotRecordList = Lists.newArrayList(new SimpleRecord(2, "two"));
    List<SimpleRecord> thirdSnapshotRecordList = Lists.newArrayList(new SimpleRecord(3, "three"));
    List<SimpleRecord> expectedRecordList = Lists.newArrayList();
    expectedRecordList.addAll(secondSnapshotRecordList);
    expectedRecordList.addAll(thirdSnapshotRecordList);
    appendData(firstSnapshotRecordList);
    table.refresh();
    long firstSnapshotid = table.currentSnapshot().snapshotId();
    long firstSnapshotCommitTime = table.currentSnapshot().timestampMillis();
    appendData(secondSnapshotRecordList);
    appendData(thirdSnapshotRecordList);
    // Expire the first snapshot before the stream starts
    table.expireSnapshots().expireSnapshotId(firstSnapshotid).commit();
    StreamingQuery query =
        startStream(
            SparkReadOptions.STREAM_FROM_TIMESTAMP, String.valueOf(firstSnapshotCommitTime));
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(expectedRecordList);
  }
@TestTemplate
public void testResumingStreamReadFromCheckpoint() throws Exception {
File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile();
File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint");
File output = temp.resolve("junit").toFile();
DataStreamWriter querySource =
spark
.readStream()
.format("iceberg")
.load(tableName)
.writeStream()
.option("checkpointLocation", writerCheckpoint.toString())
.format("parquet")
.queryName("checkpoint_test")
.option("path", output.getPath());
StreamingQuery startQuery = querySource.start();
startQuery.processAllAvailable();
startQuery.stop();
List<SimpleRecord> expected = Lists.newArrayList();
for (List<List<SimpleRecord>> expectedCheckpoint :
TEST_DATA_MULTIPLE_WRITES_MULTIPLE_SNAPSHOTS) {
// New data was added while the stream was down
appendDataAsMultipleSnapshots(expectedCheckpoint);
expected.addAll(Lists.newArrayList(Iterables.concat(Iterables.concat(expectedCheckpoint))));
// Stream starts up again from checkpoint read the newly added data and shut down
StreamingQuery restartedQuery = querySource.start();
restartedQuery.processAllAvailable();
restartedQuery.stop();
// Read data added by the stream
List<SimpleRecord> actual =
spark.read().load(output.getPath()).as(Encoders.bean(SimpleRecord.class)).collectAsList();
assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
}
}
@TestTemplate
public void testFailReadingCheckpointInvalidSnapshot() throws IOException, TimeoutException {
File writerCheckpointFolder = temp.resolve("writer-checkpoint-folder").toFile();
File writerCheckpoint = new File(writerCheckpointFolder, "writer-checkpoint");
File output = temp.resolve("junit").toFile();
DataStreamWriter querySource =
spark
.readStream()
.format("iceberg")
.load(tableName)
.writeStream()
.option("checkpointLocation", writerCheckpoint.toString())
.format("parquet")
.queryName("checkpoint_test")
.option("path", output.getPath());
List<SimpleRecord> firstSnapshotRecordList = Lists.newArrayList(new SimpleRecord(1, "one"));
List<SimpleRecord> secondSnapshotRecordList = Lists.newArrayList(new SimpleRecord(2, "two"));
StreamingQuery startQuery = querySource.start();
appendData(firstSnapshotRecordList);
table.refresh();
long firstSnapshotid = table.currentSnapshot().snapshotId();
startQuery.processAllAvailable();
startQuery.stop();
appendData(secondSnapshotRecordList);
table.expireSnapshots().expireSnapshotId(firstSnapshotid).commit();
StreamingQuery restartedQuery = querySource.start();
assertThatThrownBy(restartedQuery::processAllAvailable)
.hasCauseInstanceOf(IllegalStateException.class)
.hasMessageContaining(
String.format(
"Cannot load current offset at snapshot %d, the snapshot was expired or removed",
firstSnapshotid));
}
  // A single stream must read snapshots written in parquet, orc, and avro formats alike.
  @TestTemplate
  public void testParquetOrcAvroDataInOneTable() throws Exception {
    List<SimpleRecord> parquetFileRecords =
        Lists.newArrayList(
            new SimpleRecord(1, "one"), new SimpleRecord(2, "two"), new SimpleRecord(3, "three"));
    List<SimpleRecord> orcFileRecords =
        Lists.newArrayList(new SimpleRecord(4, "four"), new SimpleRecord(5, "five"));
    List<SimpleRecord> avroFileRecords =
        Lists.newArrayList(new SimpleRecord(6, "six"), new SimpleRecord(7, "seven"));
    appendData(parquetFileRecords);
    appendData(orcFileRecords, "orc");
    appendData(avroFileRecords, "avro");
    StreamingQuery query = startStream();
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(
            Iterables.concat(parquetFileRecords, orcFileRecords, avroFileRecords));
  }
  // Streaming from an empty table yields no rows and does not fail.
  @TestTemplate
  public void testReadStreamFromEmptyTable() throws Exception {
    StreamingQuery stream = startStream();
    List<SimpleRecord> actual = rowsAvailable(stream);
    assertThat(actual).isEmpty();
  }
  // An OVERWRITE snapshot (produced here via a v2 row delta with equality deletes) must make
  // the stream fail, since overwrite snapshots cannot be processed incrementally.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeOverwriteErrorsOut() throws Exception {
    // upgrade table to version 2 - to facilitate creation of Snapshot of type OVERWRITE.
    TableOperations ops = ((BaseTable) table).operations();
    TableMetadata meta = ops.current();
    ops.commit(meta, meta.upgradeToFormatVersion(2));
    // fill table with some initial data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);
    Schema deleteRowSchema = table.schema().select("data");
    Record dataDelete = GenericRecord.create(deleteRowSchema);
    List<Record> dataDeletes =
        Lists.newArrayList(
            dataDelete.copy("data", "one") // id = 1
            );
    DeleteFile eqDeletes =
        FileHelpers.writeDeleteFile(
            table,
            Files.localOutput(File.createTempFile("junit", null, temp.toFile())),
            TestHelpers.Row.of(0),
            dataDeletes,
            deleteRowSchema);
    DataFile dataFile =
        DataFiles.builder(table.spec())
            .withPath(File.createTempFile("junit", null, temp.toFile()).getPath())
            .withFileSizeInBytes(10)
            .withRecordCount(1)
            .withFormat(FileFormat.PARQUET)
            .build();
    table.newRowDelta().addRows(dataFile).addDeletes(eqDeletes).commit();
    // check pre-condition - that the above Delete file write - actually resulted in snapshot of
    // type OVERWRITE
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.OVERWRITE);
    StreamingQuery query = startStream();
    assertThatThrownBy(query::processAllAvailable)
        .cause()
        .isInstanceOf(IllegalStateException.class)
        .hasMessageStartingWith("Cannot process overwrite snapshot");
  }
  // A rewrite (REPLACE) snapshot must be skipped: batch sizes match a table without the rewrite.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplace() throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    makeRewriteDataFiles();
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
  }
  // Same rewrite-skipping behavior when batching is driven by the max-rows limit.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceMaxRows()
      throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    makeRewriteDataFiles();
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH, "4"), List.of(4L, 3L));
  }
  // Same rewrite-skipping behavior when both max-rows and max-files limits are set.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceMaxFilesAndRows()
      throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    makeRewriteDataFiles();
    assertMicroBatchRecordSizes(
        ImmutableMap.of(
            SparkReadOptions.STREAMING_MAX_ROWS_PER_MICRO_BATCH,
            "4",
            SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH,
            "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
  }
  // Two consecutive rewrite snapshots are both skipped.
  @TestTemplate
  public void testReadStreamWithSnapshotType2RewriteDataFilesIgnoresReplace() throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    makeRewriteDataFiles();
    makeRewriteDataFiles();
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L));
  }
  // A rewrite snapshot sandwiched between appends is skipped; both append rounds are streamed.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeRewriteDataFilesIgnoresReplaceFollowedByAppend()
      throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    makeRewriteDataFiles();
    appendDataAsMultipleSnapshots(expected);
    assertMicroBatchRecordSizes(
        ImmutableMap.of(SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"),
        List.of(1L, 2L, 1L, 1L, 1L, 1L, 1L, 2L, 1L, 1L, 1L, 1L));
  }
  // A REPLACE snapshot produced by rewriting manifests is ignored; the data still streams intact.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeReplaceIgnoresReplace() throws Exception {
    // fill table with some data
    List<List<SimpleRecord>> expected = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(expected);
    // this should create a snapshot with type Replace.
    table.rewriteManifests().clusterBy(f -> 1).commit();
    // check pre-condition
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.REPLACE);
    StreamingQuery query = startStream();
    List<SimpleRecord> actual = rowsAvailable(query);
    assertThat(actual).containsExactlyInAnyOrderElementsOf(Iterables.concat(expected));
  }
  // A DELETE snapshot must make the stream fail by default.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeDeleteErrorsOut() throws Exception {
    // Repartition by identity on id so a row-filter delete yields a DELETE snapshot.
    table.updateSpec().removeField("id_bucket").addField(ref("id")).commit();
    // fill table with some data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);
    // this should create a snapshot with type delete.
    table.newDelete().deleteFromRowFilter(Expressions.equal("id", 4)).commit();
    // check pre-condition - that the above delete operation on table resulted in Snapshot of Type
    // DELETE.
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.DELETE);
    StreamingQuery query = startStream();
    assertThatThrownBy(query::processAllAvailable)
        .cause()
        .isInstanceOf(IllegalStateException.class)
        .hasMessageStartingWith("Cannot process delete snapshot");
  }
  // With skip-delete-snapshots=true, a DELETE snapshot is skipped and appended data still streams.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeDeleteAndSkipDeleteOption() throws Exception {
    table.updateSpec().removeField("id_bucket").addField(ref("id")).commit();
    // fill table with some data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);
    // this should create a snapshot with type delete.
    table.newDelete().deleteFromRowFilter(Expressions.equal("id", 4)).commit();
    // check pre-condition - that the above delete operation on table resulted in Snapshot of Type
    // DELETE.
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.DELETE);
    StreamingQuery query = startStream(SparkReadOptions.STREAMING_SKIP_DELETE_SNAPSHOTS, "true");
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(dataAcrossSnapshots));
  }
  // With skip-overwrite-snapshots=true, an OVERWRITE snapshot is skipped and appended data still
  // streams.
  @TestTemplate
  public void testReadStreamWithSnapshotTypeDeleteAndSkipOverwriteOption() throws Exception {
    table.updateSpec().removeField("id_bucket").addField(ref("id")).commit();
    // fill table with some data
    List<List<SimpleRecord>> dataAcrossSnapshots = TEST_DATA_MULTIPLE_SNAPSHOTS;
    appendDataAsMultipleSnapshots(dataAcrossSnapshots);
    DataFile dataFile =
        DataFiles.builder(table.spec())
            .withPath(File.createTempFile("junit", null, temp.toFile()).getPath())
            .withFileSizeInBytes(10)
            .withRecordCount(1)
            .withFormat(FileFormat.PARQUET)
            .build();
    // this should create a snapshot with type overwrite.
    table
        .newOverwrite()
        .addFile(dataFile)
        .overwriteByRowFilter(Expressions.greaterThan("id", 4))
        .commit();
    // check pre-condition - that the above delete operation on table resulted in Snapshot of Type
    // OVERWRITE.
    assertThat(table.currentSnapshot().operation()).isEqualTo(DataOperations.OVERWRITE);
    StreamingQuery query = startStream(SparkReadOptions.STREAMING_SKIP_OVERWRITE_SNAPSHOTS, "true");
    assertThat(rowsAvailable(query))
        .containsExactlyInAnyOrderElementsOf(Iterables.concat(dataAcrossSnapshots));
  }
  /**
   * Commits a rewrite-data-files (REPLACE) snapshot built from the table's existing append files:
   * every data file added by an APPEND snapshot is rewritten onto itself. Used to verify that
   * streaming reads skip all files belonging to rewrite snapshots.
   */
  public void makeRewriteDataFiles() {
    table.refresh();
    RewriteFiles rewrite = table.newRewrite();
    Iterable<Snapshot> it = table.snapshots();
    for (Snapshot snapshot : it) {
      if (snapshot.operation().equals(DataOperations.APPEND)) {
        Iterable<DataFile> datafiles = snapshot.addedDataFiles(table.io());
        for (DataFile datafile : datafiles) {
          // Add and delete the same file: a data no-op that still produces a REPLACE snapshot.
          rewrite.addFile(datafile);
          rewrite.deleteFile(datafile);
        }
      }
    }
    rewrite.commit();
  }
/**
* appends each list as a Snapshot on the iceberg table at the given location. accepts a list of
* lists - each list representing data per snapshot.
*/
private void appendDataAsMultipleSnapshots(List<List<SimpleRecord>> data) {
for (List<SimpleRecord> l : data) {
appendData(l);
}
}
  /** Appends the records as a single snapshot using the default parquet write format. */
  private void appendData(List<SimpleRecord> data) {
    appendData(data, "parquet");
  }
  /** Appends the records as a single snapshot written in the given file format. */
  private void appendData(List<SimpleRecord> data, String format) {
    Dataset<Row> df = spark.createDataFrame(data, SimpleRecord.class);
    df.select("id", "data")
        .write()
        .format("iceberg")
        .option("write-format", format)
        .mode("append")
        .save(tableName);
  }
  /** Name of the in-memory sink table that startStream() writes to and rowsAvailable() queries. */
  private static final String MEMORY_TABLE = "_stream_view_mem";
  /** Starts a stream from the test table into the in-memory sink with the given read options. */
  private StreamingQuery startStream(Map<String, String> options) throws TimeoutException {
    return spark
        .readStream()
        .options(options)
        .format("iceberg")
        .load(tableName)
        .writeStream()
        .options(options)
        .format("memory")
        .queryName(MEMORY_TABLE)
        .outputMode(OutputMode.Append())
        .start();
  }
  /** Starts a stream with no extra read options. */
  private StreamingQuery startStream() throws TimeoutException {
    return startStream(Collections.emptyMap());
  }
  /**
   * Starts a stream with one extra option, additionally capping each micro batch at a single
   * file so batch boundaries are deterministic for the tests that use this overload.
   */
  private StreamingQuery startStream(String key, String value) throws TimeoutException {
    return startStream(
        ImmutableMap.of(key, value, SparkReadOptions.STREAMING_MAX_FILES_PER_MICRO_BATCH, "1"));
  }
  /** Asserts per-micro-batch record counts using the default ProcessingTime(0) trigger. */
  private void assertMicroBatchRecordSizes(
      Map<String, String> options, List<Long> expectedMicroBatchRecordSize)
      throws TimeoutException {
    assertMicroBatchRecordSizes(options, expectedMicroBatchRecordSize, Trigger.ProcessingTime(0L));
  }
  /**
   * Runs a foreachBatch stream to completion with the given options/trigger and asserts the
   * record count of each micro batch.
   *
   * <p>NOTE(review): sizes are compared in any order rather than in batch order - presumably
   * intentional to tolerate nondeterministic batch boundaries; confirm whether an ordered
   * comparison would be stricter and still stable.
   */
  private void assertMicroBatchRecordSizes(
      Map<String, String> options, List<Long> expectedMicroBatchRecordSize, Trigger trigger)
      throws TimeoutException {
    Dataset<Row> ds = spark.readStream().options(options).format("iceberg").load(tableName);
    // synchronized list: the foreachBatch callback runs on a different thread
    List<Long> syncList = Collections.synchronizedList(Lists.newArrayList());
    ds.writeStream()
        .options(options)
        .trigger(trigger)
        .foreachBatch(
            (VoidFunction2<Dataset<Row>, Long>)
                (dataset, batchId) -> {
                  microBatches.getAndIncrement();
                  syncList.add(dataset.count());
                })
        .start()
        .processAllAvailable();
    stopStreams();
    assertThat(syncList).containsExactlyInAnyOrderElementsOf(expectedMicroBatchRecordSize);
  }
  /** Drains all currently available data into the memory sink and returns its rows. */
  private List<SimpleRecord> rowsAvailable(StreamingQuery query) {
    query.processAllAvailable();
    return spark
        .sql("select * from " + MEMORY_TABLE)
        .as(Encoders.bean(SimpleRecord.class))
        .collectAsList();
  }
}
|
googleapis/google-api-java-client | 35,868 | google-api-client/src/main/java/com/google/api/client/googleapis/auth/oauth2/GoogleCredential.java | /*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.api.client.googleapis.auth.oauth2;
import com.google.api.client.auth.oauth2.BearerToken;
import com.google.api.client.auth.oauth2.ClientParametersAuthentication;
import com.google.api.client.auth.oauth2.Credential;
import com.google.api.client.auth.oauth2.CredentialRefreshListener;
import com.google.api.client.auth.oauth2.DataStoreCredentialRefreshListener;
import com.google.api.client.auth.oauth2.TokenRequest;
import com.google.api.client.auth.oauth2.TokenResponse;
import com.google.api.client.googleapis.auth.oauth2.GoogleClientSecrets.Details;
import com.google.api.client.googleapis.util.Utils;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpExecuteInterceptor;
import com.google.api.client.http.HttpRequestInitializer;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.HttpUnsuccessfulResponseHandler;
import com.google.api.client.json.GenericJson;
import com.google.api.client.json.JsonFactory;
import com.google.api.client.json.JsonObjectParser;
import com.google.api.client.json.webtoken.JsonWebSignature;
import com.google.api.client.json.webtoken.JsonWebToken;
import com.google.api.client.util.Beta;
import com.google.api.client.util.Clock;
import com.google.api.client.util.Joiner;
import com.google.api.client.util.PemReader;
import com.google.api.client.util.PemReader.Section;
import com.google.api.client.util.Preconditions;
import com.google.api.client.util.SecurityUtils;
import com.google.api.client.util.store.DataStoreFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.Collection;
import java.util.Collections;
/**
* Thread-safe Google-specific implementation of the OAuth 2.0 helper for accessing protected
* resources using an access token, as well as optionally refreshing the access token when it
* expires using a refresh token.
*
* <p>There are three modes supported: access token only, refresh token flow, and service account
* flow (with or without impersonating a user).
*
* <p>If all you have is an access token, you simply pass the {@link TokenResponse} to the
* credential using {@link Builder#setFromTokenResponse(TokenResponse)}. Google credential uses
* {@link BearerToken#authorizationHeaderAccessMethod()} as the access method. Sample usage:
*
* <pre>{@code
* public static GoogleCredential createCredentialWithAccessTokenOnly(TokenResponse tokenResponse) {
* return new GoogleCredential().setFromTokenResponse(tokenResponse);
* }
* }</pre>
*
* <p>If you have a refresh token, it is similar to the case of access token only, but you
* additionally need to pass the credential the client secrets using {@link
* Builder#setClientSecrets(GoogleClientSecrets)} or {@link Builder#setClientSecrets(String,
* String)}. Google credential uses {@link GoogleOAuthConstants#TOKEN_SERVER_URL} as the token
* server URL, and {@link ClientParametersAuthentication} with the client ID and secret as the
* client authentication. Sample usage:
*
* <pre>{@code
* public static GoogleCredential createCredentialWithRefreshToken(
* HttpTransport transport, JsonFactory jsonFactory,
* GoogleClientSecrets clientSecrets, TokenResponse tokenResponse) {
* return new GoogleCredential.Builder().setTransport(transport)
* .setJsonFactory(jsonFactory)
* .setClientSecrets(clientSecrets)
* .build()
* .setFromTokenResponse(tokenResponse);
* }
* }</pre>
*
* <p>The <a href= "https://developers.google.com/accounts/docs/OAuth2ServiceAccount">service
* account flow</a> is used when you want to access data owned by your client application. You
* download the private key in a {@code .p12} file from the Google APIs Console. Use {@link
* Builder#setServiceAccountId(String)}, {@link
* Builder#setServiceAccountPrivateKeyFromP12File(File)}, and {@link
* Builder#setServiceAccountScopes(Collection)}. Sample usage:
*
* <pre>{@code
* public static GoogleCredential createCredentialForServiceAccount(HttpTransport transport,
* JsonFactory jsonFactory,
* String serviceAccountId, Collection<String> serviceAccountScopes, File p12File)
* throws GeneralSecurityException, IOException {
* return new GoogleCredential.Builder().setTransport(transport).setJsonFactory(jsonFactory)
* .setServiceAccountId(serviceAccountId).setServiceAccountScopes(serviceAccountScopes)
* .setServiceAccountPrivateKeyFromP12File(p12File).build();
* }
* }</pre>
*
* <p>You can also use the service account flow to impersonate a user in a domain that you own. This
* is very similar to the service account flow above, but you additionally call {@link
* Builder#setServiceAccountUser(String)}. Sample usage:
*
* <pre>{@code
* public static GoogleCredential createCredentialForServiceAccountImpersonateUser
* (HttpTransport transport, JsonFactory jsonFactory, String serviceAccountId,
* Collection<String> serviceAccountScopes, File p12File,
* String serviceAccountUser) throws GeneralSecurityException, IOException {
* return new GoogleCredential.Builder()
* .setTransport(transport)
* .setJsonFactory(jsonFactory)
* .setServiceAccountId(serviceAccountId)
* .setServiceAccountScopes(serviceAccountScopes)
* .setServiceAccountPrivateKeyFromP12File(p12File)
* .setServiceAccountUser(serviceAccountUser)
* .build();
* }
* }</pre>
*
* <p>If you need to persist the access token in a data store, use {@link DataStoreFactory} and
* {@link Builder#addRefreshListener(CredentialRefreshListener)} with {@link
* DataStoreCredentialRefreshListener}.
*
* <p>If you have a custom request initializer, request execute interceptor, or unsuccessful
* response handler, take a look at the sample usage for {@link HttpExecuteInterceptor} and {@link
* HttpUnsuccessfulResponseHandler}, which are interfaces that this class also implements.
*
* @since 1.7
* @author Yaniv Inbar
* @deprecated Please use <a href="https://github.com/googleapis/google-auth-library-java">
* google-auth-library</a> for handling Application Default Credentials and other non-OAuth2
* based authentication.
*/
@Deprecated
public class GoogleCredential extends Credential {
// JSON "type" value identifying an authorized-user (installed-app) credentials file.
static final String USER_FILE_TYPE = "authorized_user";
// JSON "type" value identifying a service-account credentials file.
static final String SERVICE_ACCOUNT_FILE_TYPE = "service_account";
// Shared resolver for the Application Default Credentials; see getApplicationDefault().
@Beta
private static DefaultCredentialProvider defaultCredentialProvider =
new DefaultCredentialProvider();
/**
 * {@link Beta} <br>
 * Returns the Application Default Credentials.
 *
 * <p>The Application Default Credentials identify and authorize the whole application: the
 * built-in service account when running on Google Compute Engine, or the credentials file at the
 * path in the GOOGLE_APPLICATION_CREDENTIALS environment variable.
 *
 * @return the credential instance.
 * @throws IOException if the credential cannot be created in the current environment.
 */
@Beta
public static GoogleCredential getApplicationDefault() throws IOException {
  // Delegate to the overload, using the library-wide default transport and JSON factory.
  HttpTransport transport = Utils.getDefaultTransport();
  JsonFactory jsonFactory = Utils.getDefaultJsonFactory();
  return getApplicationDefault(transport, jsonFactory);
}
/**
 * {@link Beta} <br>
 * Returns the Application Default Credentials.
 *
 * <p>The Application Default Credentials identify and authorize the whole application: the
 * built-in service account when running on Google Compute Engine, or the credentials file at the
 * path in the GOOGLE_APPLICATION_CREDENTIALS environment variable.
 *
 * @param transport the transport for Http calls.
 * @param jsonFactory the factory for Json parsing and formatting.
 * @return the credential instance.
 * @throws IOException if the credential cannot be created in the current environment.
 */
@Beta
public static GoogleCredential getApplicationDefault(
    HttpTransport transport, JsonFactory jsonFactory) throws IOException {
  // checkNotNull returns its argument, so validation and use are combined; the null checks
  // still run in the same order (transport first, then jsonFactory) as before.
  return defaultCredentialProvider.getDefaultCredential(
      Preconditions.checkNotNull(transport), Preconditions.checkNotNull(jsonFactory));
}
/**
 * {@link Beta} <br>
 *
 * <p>Important: This method does not validate the credential configuration. A security risk holds
 * when a credential configuration is accepted from a source that is not under your control and
 * used without validation on your side.
 *
 * <p>If you are loading your credential configuration from an untrusted source and have not
 * mitigated the risks (e.g. by validating the configuration yourself), make these changes as soon
 * as possible to prevent security risks to your environment.
 *
 * <p>Regardless of the method used, it is always your responsibility to validate configurations
 * received from external sources.
 *
 * <p>See the <a
 * href="https://cloud.google.com/docs/authentication/external/externally-sourced-credentials">documentation</a>
 * for more details.
 *
 * <p>Returns a credential defined by a Json file.
 *
 * @param credentialStream the stream with the credential definition.
 * @return the credential defined by the credentialStream.
 * @throws IOException if the credential cannot be created from the stream.
 * @deprecated This method is being deprecated because of a potential security risk. Please use <a
 *     href="https://cloud.google.com/java/docs/reference/google-auth-library/latest/com.google.auth.oauth2.GoogleCredentials">GoogleCredentials</a>
 *     instead.
 */
@Beta
@Deprecated
public static GoogleCredential fromStream(InputStream credentialStream) throws IOException {
  // Delegate to the overload, using the library-wide default transport and JSON factory.
  HttpTransport transport = Utils.getDefaultTransport();
  JsonFactory jsonFactory = Utils.getDefaultJsonFactory();
  return fromStream(credentialStream, transport, jsonFactory);
}
/**
 * {@link Beta} <br>
 *
 * <p>Important: This method does not validate the credential configuration. A security risk holds
 * when a credential configuration is accepted from a source that is not under your control and
 * used without validation on your side.
 *
 * <p>If you are loading your credential configuration from an untrusted source and have not
 * mitigated the risks (e.g. by validating the configuration yourself), make these changes as soon
 * as possible to prevent security risks to your environment.
 *
 * <p>Regardless of the method used, it is always your responsibility to validate configurations
 * received from external sources.
 *
 * <p>See the <a
 * href="https://cloud.google.com/docs/authentication/external/externally-sourced-credentials">documentation</a>
 * for more details.
 *
 * <p>Returns a credential defined by a Json file.
 *
 * @param credentialStream the stream with the credential definition.
 * @param transport the transport for Http calls.
 * @param jsonFactory the factory for Json parsing and formatting.
 * @return the credential defined by the credentialStream.
 * @throws IOException if the credential cannot be created from the stream.
 * @deprecated This method is being deprecated because of a potential security risk. Please use <a
 *     href="https://cloud.google.com/java/docs/reference/google-auth-library/latest/com.google.auth.oauth2.GoogleCredentials">GoogleCredentials</a>
 *     instead.
 */
@Beta
@Deprecated
public static GoogleCredential fromStream(
    InputStream credentialStream, HttpTransport transport, JsonFactory jsonFactory)
    throws IOException {
  Preconditions.checkNotNull(credentialStream);
  Preconditions.checkNotNull(transport);
  Preconditions.checkNotNull(jsonFactory);
  // Parse the whole JSON file; parseAndClose also closes credentialStream.
  GenericJson fileContents =
      new JsonObjectParser(jsonFactory)
          .parseAndClose(credentialStream, OAuth2Utils.UTF_8, GenericJson.class);
  String fileType = (String) fileContents.get("type");
  if (fileType == null) {
    throw new IOException("Error reading credentials from stream, 'type' field not specified.");
  }
  // Dispatch on the "type" field; fileType is known non-null here.
  if (fileType.equals(USER_FILE_TYPE)) {
    return fromStreamUser(fileContents, transport, jsonFactory);
  }
  if (fileType.equals(SERVICE_ACCOUNT_FILE_TYPE)) {
    return fromStreamServiceAccount(fileContents, transport, jsonFactory);
  }
  throw new IOException(
      String.format(
          "Error reading credentials from stream, 'type' value '%s' not recognized."
              + " Expecting '%s' or '%s'.",
          fileType, USER_FILE_TYPE, SERVICE_ACCOUNT_FILE_TYPE));
}
/**
 * Service account ID (typically an e-mail address) or {@code null} if not using the service
 * account flow.
 */
private String serviceAccountId;
/**
 * Service account Project ID or {@code null} if not present, either because this is not using the
 * service account flow, or is using an older version of the service account configuration.
 */
private String serviceAccountProjectId;
/**
 * Collection of OAuth scopes to use with the service account flow or {@code null} if not using
 * the service account flow. When populated by the constructor this is an unmodifiable view
 * (never {@code null} in that case; an absent value becomes an empty list).
 */
private Collection<String> serviceAccountScopes;
/**
 * Private key to use with the service account flow or {@code null} if not using the service
 * account flow.
 */
private PrivateKey serviceAccountPrivateKey;
/**
 * ID of private key to use with the service account flow or {@code null} if not using the service
 * account flow.
 */
private String serviceAccountPrivateKeyId;
/**
 * Email address of the user the application is trying to impersonate in the service account flow
 * or {@code null} for none or if not using the service account flow.
 */
private String serviceAccountUser;
/**
 * Constructor with the ability to access protected resources, but not refresh tokens.
 *
 * <p>To use with the ability to refresh tokens, use {@link Builder}.
 */
public GoogleCredential() {
  // A default Builder carries no service-account state, so this credential can only hold an
  // externally supplied access token.
  this(new Builder());
}
/**
 * Builds a credential from the given builder.
 *
 * <p>When a service-account private key is present, the service-account fields are copied over
 * (a missing scope collection becomes an empty list, a present one an unmodifiable view).
 * Without a private key, no other service-account field may be set.
 *
 * @param builder Google credential builder
 * @since 1.14
 */
protected GoogleCredential(Builder builder) {
  super(builder);
  if (builder.serviceAccountPrivateKey != null) {
    // Service-account flow: the account ID is mandatory.
    serviceAccountId = Preconditions.checkNotNull(builder.serviceAccountId);
    serviceAccountProjectId = builder.serviceAccountProjectId;
    if (builder.serviceAccountScopes == null) {
      serviceAccountScopes = Collections.<String>emptyList();
    } else {
      serviceAccountScopes = Collections.unmodifiableCollection(builder.serviceAccountScopes);
    }
    serviceAccountPrivateKey = builder.serviceAccountPrivateKey;
    serviceAccountPrivateKeyId = builder.serviceAccountPrivateKeyId;
    serviceAccountUser = builder.serviceAccountUser;
  } else {
    // No private key: reject any stray service-account configuration.
    Preconditions.checkArgument(
        builder.serviceAccountId == null
            && builder.serviceAccountScopes == null
            && builder.serviceAccountUser == null);
  }
}
// Covariant overrides: each narrows the return type to GoogleCredential so calls can be chained.
@Override
public GoogleCredential setAccessToken(String accessToken) {
  return (GoogleCredential) super.setAccessToken(accessToken);
}
@Override
public GoogleCredential setRefreshToken(String refreshToken) {
  if (refreshToken != null) {
    // A refresh token is only usable when the pieces needed to call the token endpoint exist.
    Preconditions.checkArgument(
        getJsonFactory() != null && getTransport() != null && getClientAuthentication() != null,
        "Please use the Builder and call setJsonFactory, setTransport and setClientSecrets");
  }
  return (GoogleCredential) super.setRefreshToken(refreshToken);
}
@Override
public GoogleCredential setExpirationTimeMilliseconds(Long expirationTimeMilliseconds) {
  return (GoogleCredential) super.setExpirationTimeMilliseconds(expirationTimeMilliseconds);
}
@Override
public GoogleCredential setExpiresInSeconds(Long expiresIn) {
  return (GoogleCredential) super.setExpiresInSeconds(expiresIn);
}
@Override
public GoogleCredential setFromTokenResponse(TokenResponse tokenResponse) {
  return (GoogleCredential) super.setFromTokenResponse(tokenResponse);
}
/**
 * Refreshes the access token.
 *
 * <p>User-credential flow: defers to the superclass, which uses the refresh token. Service-account
 * flow: there is no refresh token, so a JWT assertion is signed with the private key and exchanged
 * for a new access token (JWT bearer grant).
 *
 * @return the token response from the token endpoint
 * @throws IOException if signing fails or the token request fails
 */
@Override
@Beta
protected TokenResponse executeRefreshToken() throws IOException {
  if (serviceAccountPrivateKey == null) {
    return super.executeRefreshToken();
  }
  // service accounts: no refresh token; instead use private key to request new access token
  JsonWebSignature.Header header = new JsonWebSignature.Header();
  header.setAlgorithm("RS256");
  header.setType("JWT");
  header.setKeyId(serviceAccountPrivateKeyId);
  JsonWebToken.Payload payload = new JsonWebToken.Payload();
  long currentTime = getClock().currentTimeMillis();
  payload.setIssuer(serviceAccountId);
  payload.setAudience(getTokenServerEncodedUrl());
  payload.setIssuedAtTimeSeconds(currentTime / 1000);
  // Tokens are requested with a fixed one-hour lifetime.
  payload.setExpirationTimeSeconds(currentTime / 1000 + 3600);
  payload.setSubject(serviceAccountUser);
  payload.put("scope", Joiner.on(' ').join(serviceAccountScopes));
  try {
    String assertion =
        JsonWebSignature.signUsingRsaSha256(
            serviceAccountPrivateKey, getJsonFactory(), header, payload);
    TokenRequest request =
        new TokenRequest(
            getTransport(),
            getJsonFactory(),
            new GenericUrl(getTokenServerEncodedUrl()),
            "urn:ietf:params:oauth:grant-type:jwt-bearer");
    request.put("assertion", assertion);
    return request.execute();
  } catch (GeneralSecurityException exception) {
    // Chain the cause directly instead of the message-less IOException()+initCause() pattern,
    // so the wrapping IOException carries a meaningful message.
    throw new IOException(exception);
  }
}
/**
 * Returns the service account ID (typically an e-mail address) or {@code null} if not using the
 * service account flow.
 */
public final String getServiceAccountId() {
  return serviceAccountId;
}
/**
 * Returns the service account Project ID or {@code null} if not present, either because this is
 * not using the service account flow, or is using an older version of the service account
 * configuration.
 */
public final String getServiceAccountProjectId() {
  return serviceAccountProjectId;
}
/**
 * Returns a collection of OAuth scopes to use with the service account flow or {@code null} if
 * not using the service account flow. In the service account flow this is an unmodifiable view.
 */
public final Collection<String> getServiceAccountScopes() {
  return serviceAccountScopes;
}
/**
 * Returns the space-separated OAuth scopes to use with the service account flow or {@code null}
 * if not using the service account flow.
 *
 * @since 1.15
 */
public final String getServiceAccountScopesAsString() {
  return serviceAccountScopes == null ? null : Joiner.on(' ').join(serviceAccountScopes);
}
/**
 * Returns the private key to use with the service account flow or {@code null} if not using the
 * service account flow.
 */
public final PrivateKey getServiceAccountPrivateKey() {
  return serviceAccountPrivateKey;
}
/**
 * {@link Beta} <br>
 * Returns the ID of the private key to use with the service account flow or {@code null} if not
 * using the service account flow.
 */
@Beta
public final String getServiceAccountPrivateKeyId() {
  return serviceAccountPrivateKeyId;
}
/**
 * Returns the email address of the user the application is trying to impersonate in the service
 * account flow or {@code null} for none or if not using the service account flow.
 */
public final String getServiceAccountUser() {
  return serviceAccountUser;
}
/**
 * {@link Beta} <br>
 * Indicates whether the credential requires scopes to be specified by calling createScoped before
 * use.
 */
@Beta
public boolean createScopedRequired() {
  // Only the service-account flow needs scopes, and only when none were supplied.
  return serviceAccountPrivateKey != null
      && (serviceAccountScopes == null || serviceAccountScopes.isEmpty());
}
/**
 * {@link Beta} <br>
 * For credentials that require scopes, creates a copy of the credential with the specified
 * scopes; credentials that are not service-account based are returned unchanged.
 */
@Beta
public GoogleCredential createScoped(Collection<String> scopes) {
  return serviceAccountPrivateKey == null
      ? this
      : toBuilder().setServiceAccountScopes(scopes).build();
}
/**
 * {@link Beta} <br>
 * For service accounts that need to delegate to a specific user, creates a copy of the credential
 * with the specified user; credentials that are not service-account based are returned unchanged.
 */
@Beta
public GoogleCredential createDelegated(String user) {
  return serviceAccountPrivateKey == null
      ? this
      : toBuilder().setServiceAccountUser(user).build();
}
/**
 * {@link Beta} <br>
 * Creates a new {@link Builder} pre-populated with this credential's state.
 */
@Beta
public Builder toBuilder() {
  Builder builder = new GoogleCredential.Builder();
  builder.setServiceAccountPrivateKey(serviceAccountPrivateKey);
  builder.setServiceAccountPrivateKeyId(serviceAccountPrivateKeyId);
  builder.setServiceAccountId(serviceAccountId);
  builder.setServiceAccountProjectId(serviceAccountProjectId);
  builder.setServiceAccountUser(serviceAccountUser);
  builder.setServiceAccountScopes(serviceAccountScopes);
  builder.setTokenServerEncodedUrl(getTokenServerEncodedUrl());
  builder.setTransport(getTransport());
  builder.setJsonFactory(getJsonFactory());
  builder.setClock(getClock());
  builder.setClientAuthentication(getClientAuthentication());
  return builder;
}
/**
* Google credential builder.
*
* <p>Implementation is not thread-safe.
*/
public static class Builder extends Credential.Builder {
/** Service account ID (typically an e-mail address) or {@code null} for none. */
String serviceAccountId;
/** Collection of OAuth scopes to use with the service account flow or {@code null} for none. */
Collection<String> serviceAccountScopes;
/** Private key to use with the service account flow or {@code null} for none. */
PrivateKey serviceAccountPrivateKey;
/** Id of the private key to use with the service account flow or {@code null} for none. */
String serviceAccountPrivateKeyId;
/** Project ID associated with the Service Account, or {@code null} for none. */
String serviceAccountProjectId;
/**
 * Email address of the user the application is trying to impersonate in the service account
 * flow or {@code null} for none.
 */
String serviceAccountUser;
public Builder() {
  // Google OAuth2 presents access tokens as Authorization-header bearer tokens, and uses
  // Google's standard token endpoint by default.
  super(BearerToken.authorizationHeaderAccessMethod());
  setTokenServerEncodedUrl(GoogleOAuthConstants.TOKEN_SERVER_URL);
}
@Override
public GoogleCredential build() {
  return new GoogleCredential(this);
}
// Covariant overrides: each narrows the return type to Builder so calls can be chained.
@Override
public Builder setTransport(HttpTransport transport) {
  return (Builder) super.setTransport(transport);
}
@Override
public Builder setJsonFactory(JsonFactory jsonFactory) {
  return (Builder) super.setJsonFactory(jsonFactory);
}
/** @since 1.9 */
@Override
public Builder setClock(Clock clock) {
  return (Builder) super.setClock(clock);
}
/**
 * Sets the client identifier and secret.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 */
public Builder setClientSecrets(String clientId, String clientSecret) {
  setClientAuthentication(new ClientParametersAuthentication(clientId, clientSecret));
  return this;
}
/**
 * Sets the client secrets from a parsed client-secrets JSON document.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 */
public Builder setClientSecrets(GoogleClientSecrets clientSecrets) {
  Details details = clientSecrets.getDetails();
  setClientAuthentication(
      new ClientParametersAuthentication(details.getClientId(), details.getClientSecret()));
  return this;
}
/** Returns the service account ID (typically an e-mail address) or {@code null} for none. */
public final String getServiceAccountId() {
  return serviceAccountId;
}
/**
 * Sets the service account ID (typically an e-mail address) or {@code null} for none.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 */
public Builder setServiceAccountId(String serviceAccountId) {
  this.serviceAccountId = serviceAccountId;
  return this;
}
/** Returns the service account Project ID or {@code null} for none. */
public final String getServiceAccountProjectId() {
  return serviceAccountProjectId;
}
/**
 * Sets the service account Project ID or {@code null} for none.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 */
public Builder setServiceAccountProjectId(String serviceAccountProjectId) {
  this.serviceAccountProjectId = serviceAccountProjectId;
  return this;
}
/**
 * Returns a collection of OAuth scopes to use with the service account flow or {@code null} for
 * none.
 */
public final Collection<String> getServiceAccountScopes() {
  return serviceAccountScopes;
}
/**
 * Sets the space-separated OAuth scopes to use with the service account flow or {@code null}
 * for none.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 *
 * @param serviceAccountScopes collection of scopes to be joined by a space separator (or a
 *     single value containing multiple space-separated scopes)
 * @since 1.15
 */
public Builder setServiceAccountScopes(Collection<String> serviceAccountScopes) {
  this.serviceAccountScopes = serviceAccountScopes;
  return this;
}
/** Returns the private key to use with the service account flow or {@code null} for none. */
public final PrivateKey getServiceAccountPrivateKey() {
  return serviceAccountPrivateKey;
}
/**
 * Sets the private key to use with the service account flow or {@code null} for none.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 */
public Builder setServiceAccountPrivateKey(PrivateKey serviceAccountPrivateKey) {
  this.serviceAccountPrivateKey = serviceAccountPrivateKey;
  return this;
}
/**
 * {@link Beta} <br>
 * Returns the id of the private key to use with the service account flow or {@code null} for
 * none.
 */
@Beta
public final String getServiceAccountPrivateKeyId() {
  return serviceAccountPrivateKeyId;
}
/**
 * {@link Beta} <br>
 * Sets the id of the private key to use with the service account flow or {@code null} for none.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 */
@Beta
public Builder setServiceAccountPrivateKeyId(String serviceAccountPrivateKeyId) {
  this.serviceAccountPrivateKeyId = serviceAccountPrivateKeyId;
  return this;
}
/**
 * Sets the private key to use with the service account flow, read from a PKCS#12 file.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 *
 * @param p12File p12 file object
 */
public Builder setServiceAccountPrivateKeyFromP12File(File p12File)
    throws GeneralSecurityException, IOException {
  // NOTE(review): relies on the InputStream overload (and ultimately
  // SecurityUtils.loadPrivateKeyFromKeyStore) to close this stream on all paths — verify.
  setServiceAccountPrivateKeyFromP12File(new FileInputStream(p12File));
  return this;
}
/**
 * Sets the private key to use with the service account flow, read from a PKCS#12 key store
 * stream.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 *
 * @param p12FileInputStream input stream to the p12 file. This file is closed at the end of
 *     this method in a finally block.
 */
public Builder setServiceAccountPrivateKeyFromP12File(InputStream p12FileInputStream)
    throws GeneralSecurityException, IOException {
  // "notasecret"/"privatekey" — presumably the fixed store password, alias, and key password
  // that Google-issued p12 files use; confirm against the key-distribution docs.
  serviceAccountPrivateKey =
      SecurityUtils.loadPrivateKeyFromKeyStore(
          SecurityUtils.getPkcs12KeyStore(),
          p12FileInputStream,
          "notasecret",
          "privatekey",
          "notasecret");
  return this;
}
/**
 * {@link Beta} <br>
 * Sets the private key to use with the service account flow, read from a PEM file containing a
 * PKCS#8 "PRIVATE KEY" section.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 *
 * @param pemFile input stream to the PEM file (closed at the end of this method in a finally
 *     block)
 * @since 1.13
 */
@Beta
public Builder setServiceAccountPrivateKeyFromPemFile(File pemFile)
    throws GeneralSecurityException, IOException {
  // NOTE(review): FileReader uses the platform default charset; PEM content is Base64/ASCII so
  // this looks harmless in practice, but an explicit charset would be safer — confirm.
  byte[] bytes =
      PemReader.readFirstSectionAndClose(new FileReader(pemFile), "PRIVATE KEY")
          .getBase64DecodedBytes();
  serviceAccountPrivateKey =
      SecurityUtils.getRsaKeyFactory().generatePrivate(new PKCS8EncodedKeySpec(bytes));
  return this;
}
/**
 * Returns the email address of the user the application is trying to impersonate in the service
 * account flow or {@code null} for none.
 */
public final String getServiceAccountUser() {
  return serviceAccountUser;
}
/**
 * Sets the email address of the user the application is trying to impersonate in the service
 * account flow or {@code null} for none.
 *
 * <p>Overriding is only supported for the purpose of calling the super implementation and
 * changing the return type, but nothing else.
 */
public Builder setServiceAccountUser(String serviceAccountUser) {
  this.serviceAccountUser = serviceAccountUser;
  return this;
}
// Covariant overrides: each narrows the return type to Builder so calls can be chained.
@Override
public Builder setRequestInitializer(HttpRequestInitializer requestInitializer) {
  return (Builder) super.setRequestInitializer(requestInitializer);
}
@Override
public Builder addRefreshListener(CredentialRefreshListener refreshListener) {
  return (Builder) super.addRefreshListener(refreshListener);
}
@Override
public Builder setRefreshListeners(Collection<CredentialRefreshListener> refreshListeners) {
  return (Builder) super.setRefreshListeners(refreshListeners);
}
@Override
public Builder setTokenServerUrl(GenericUrl tokenServerUrl) {
  return (Builder) super.setTokenServerUrl(tokenServerUrl);
}
@Override
public Builder setTokenServerEncodedUrl(String tokenServerEncodedUrl) {
  return (Builder) super.setTokenServerEncodedUrl(tokenServerEncodedUrl);
}
@Override
public Builder setClientAuthentication(HttpExecuteInterceptor clientAuthentication) {
  return (Builder) super.setClientAuthentication(clientAuthentication);
}
}
/**
 * Builds a user credential from an already-parsed "authorized_user" credentials file.
 * The credential is refreshed once so an unusable definition fails here rather than at first use.
 */
@Beta
private static GoogleCredential fromStreamUser(
    GenericJson fileContents, HttpTransport transport, JsonFactory jsonFactory)
    throws IOException {
  String clientId = (String) fileContents.get("client_id");
  String clientSecret = (String) fileContents.get("client_secret");
  String refreshToken = (String) fileContents.get("refresh_token");
  if (clientId == null || clientSecret == null || refreshToken == null) {
    throw new IOException(
        "Error reading user credential from stream, "
            + " expecting 'client_id', 'client_secret' and 'refresh_token'.");
  }
  GoogleCredential.Builder builder = new GoogleCredential.Builder();
  builder.setClientSecrets(clientId, clientSecret);
  builder.setTransport(transport);
  builder.setJsonFactory(jsonFactory);
  GoogleCredential userCredential = builder.build();
  userCredential.setRefreshToken(refreshToken);
  // Do a refresh so we can fail early rather than return an unusable credential
  userCredential.refreshToken();
  return userCredential;
}
/**
 * Builds a service-account credential from an already-parsed "service_account" credentials file.
 * Scopes start out empty; callers are expected to add them via {@link #createScoped}.
 */
@Beta
private static GoogleCredential fromStreamServiceAccount(
    GenericJson fileContents, HttpTransport transport, JsonFactory jsonFactory)
    throws IOException {
  String clientId = (String) fileContents.get("client_id");
  String clientEmail = (String) fileContents.get("client_email");
  String privateKeyPem = (String) fileContents.get("private_key");
  String privateKeyId = (String) fileContents.get("private_key_id");
  boolean missingField =
      clientId == null || clientEmail == null || privateKeyPem == null || privateKeyId == null;
  if (missingField) {
    throw new IOException(
        "Error reading service account credential from stream, "
            + "expecting 'client_id', 'client_email', 'private_key' and 'private_key_id'.");
  }
  PrivateKey privateKey = privateKeyFromPkcs8(privateKeyPem);
  Builder credentialBuilder =
      new GoogleCredential.Builder()
          .setTransport(transport)
          .setJsonFactory(jsonFactory)
          .setServiceAccountId(clientEmail)
          .setServiceAccountScopes(Collections.<String>emptyList())
          .setServiceAccountPrivateKey(privateKey)
          .setServiceAccountPrivateKeyId(privateKeyId);
  // Optional fields: a custom token endpoint and the owning project.
  String tokenUri = (String) fileContents.get("token_uri");
  if (tokenUri != null) {
    credentialBuilder.setTokenServerEncodedUrl(tokenUri);
  }
  String projectId = (String) fileContents.get("project_id");
  if (projectId != null) {
    credentialBuilder.setServiceAccountProjectId(projectId);
  }
  // Don't do a refresh at this point, as it will always fail before the scopes are added.
  return credentialBuilder.build();
}
/**
 * Parses an RSA private key from a PKCS#8 PEM string (a "PRIVATE KEY" section).
 *
 * @param privateKeyPem PEM-encoded PKCS#8 private key
 * @return the parsed RSA private key
 * @throws IOException if the PEM section is missing or the key material is invalid
 */
@Beta
private static PrivateKey privateKeyFromPkcs8(String privateKeyPem) throws IOException {
  Reader reader = new StringReader(privateKeyPem);
  Section section = PemReader.readFirstSectionAndClose(reader, "PRIVATE KEY");
  if (section == null) {
    throw new IOException("Invalid PKCS8 data.");
  }
  byte[] bytes = section.getBase64DecodedBytes();
  PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(bytes);
  // Throw directly from each handler instead of routing through an accumulator variable and a
  // trailing throw; behavior (wrapped IOException with the original cause) is unchanged.
  try {
    return SecurityUtils.getRsaKeyFactory().generatePrivate(keySpec);
  } catch (NoSuchAlgorithmException exception) {
    throw OAuth2Utils.exceptionWithCause(
        new IOException("Unexpected exception reading PKCS data"), exception);
  } catch (InvalidKeySpecException exception) {
    throw OAuth2Utils.exceptionWithCause(
        new IOException("Unexpected exception reading PKCS data"), exception);
  }
}
}
|
openjdk/nashorn | 35,942 | src/org.openjdk.nashorn/share/classes/org/openjdk/nashorn/internal/objects/NativeObject.java | /*
* Copyright (c) 2010, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.nashorn.internal.objects;
import static jdk.dynalink.StandardNamespace.METHOD;
import static jdk.dynalink.StandardNamespace.PROPERTY;
import static jdk.dynalink.StandardOperation.GET;
import static jdk.dynalink.StandardOperation.SET;
import static org.openjdk.nashorn.internal.lookup.Lookup.MH;
import static org.openjdk.nashorn.internal.runtime.ECMAErrors.typeError;
import static org.openjdk.nashorn.internal.runtime.ScriptRuntime.UNDEFINED;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import jdk.dynalink.CallSiteDescriptor;
import jdk.dynalink.Operation;
import jdk.dynalink.beans.BeansLinker;
import jdk.dynalink.beans.StaticClass;
import jdk.dynalink.linker.GuardedInvocation;
import jdk.dynalink.linker.GuardingDynamicLinker;
import jdk.dynalink.linker.LinkRequest;
import jdk.dynalink.linker.support.SimpleLinkRequest;
import org.openjdk.nashorn.api.scripting.ScriptObjectMirror;
import org.openjdk.nashorn.internal.lookup.Lookup;
import org.openjdk.nashorn.internal.objects.annotations.Attribute;
import org.openjdk.nashorn.internal.objects.annotations.Constructor;
import org.openjdk.nashorn.internal.objects.annotations.Function;
import org.openjdk.nashorn.internal.objects.annotations.ScriptClass;
import org.openjdk.nashorn.internal.objects.annotations.Where;
import org.openjdk.nashorn.internal.runtime.AccessorProperty;
import org.openjdk.nashorn.internal.runtime.ECMAException;
import org.openjdk.nashorn.internal.runtime.JSType;
import org.openjdk.nashorn.internal.runtime.Property;
import org.openjdk.nashorn.internal.runtime.PropertyMap;
import org.openjdk.nashorn.internal.runtime.ScriptObject;
import org.openjdk.nashorn.internal.runtime.ScriptRuntime;
import org.openjdk.nashorn.internal.runtime.arrays.ArrayData;
import org.openjdk.nashorn.internal.runtime.arrays.ArrayIndex;
import org.openjdk.nashorn.internal.runtime.linker.Bootstrap;
import org.openjdk.nashorn.internal.runtime.linker.InvokeByName;
import org.openjdk.nashorn.internal.runtime.linker.NashornBeansLinker;
import org.openjdk.nashorn.internal.runtime.linker.NashornCallSiteDescriptor;
/**
* ECMA 15.2 Object objects
*
* JavaScript Object constructor/prototype. Note: instances of this class are
* never created. This class is not even a subclass of ScriptObject. But, we use
* this class to generate prototype and constructor for "Object".
*
*/
@ScriptClass("Object")
public final class NativeObject {
/** Methodhandle to proto getter */
public static final MethodHandle GET__PROTO__ = findOwnMH("get__proto__", ScriptObject.class, Object.class);
/** Methodhandle to proto setter */
public static final MethodHandle SET__PROTO__ = findOwnMH("set__proto__", Object.class, Object.class, Object.class);
// Key object used to cache the per-Global "toString" invoker created below.
private static final Object TO_STRING = new Object();
// Lazily creates (and caches in the current Global) the dynamic "toString" call site.
private static InvokeByName getTO_STRING() {
    return Global.instance().getInvokeByName(TO_STRING, () ->
        new InvokeByName("toString", ScriptObject.class));
}
// Namespaced dynalink operations used when linking property/method access.
private static final Operation GET_METHOD = GET.withNamespace(METHOD);
private static final Operation GET_PROPERTY = GET.withNamespace(PROPERTY);
private static final Operation SET_PROPERTY = SET.withNamespace(PROPERTY);
@SuppressWarnings("unused")
private static ScriptObject get__proto__(final Object self) {
    // ES6 B.2.2.1.1 get Object.prototype.__proto__: ToObject(this), then return its prototype.
    // checkObject throws a TypeError if the coerced value is not a script object.
    final ScriptObject thisObject = Global.checkObject(Global.toObject(self));
    return thisObject.getProto();
}
@SuppressWarnings("unused")
private static Object set__proto__(final Object self, final Object proto) {
    // ES6 B.2.2.1.2 set Object.prototype.__proto__.
    // Step 1: throws for null/undefined receivers.
    Global.checkObjectCoercible(self);
    // Step 4: a primitive (non-ScriptObject) receiver is silently ignored.
    if (!(self instanceof ScriptObject)) {
        return UNDEFINED;
    }
    // Step 3: only null or a script object is accepted as the new prototype; anything
    // else leaves the receiver untouched.
    if (proto == null || proto instanceof ScriptObject) {
        ((ScriptObject) self).setPrototypeOf(proto);
    }
    return UNDEFINED;
}
// Method types of the getter/setter handles used when the receiver is a ScriptObjectMirror.
private static final MethodType MIRROR_GETTER_TYPE = MethodType.methodType(Object.class, ScriptObjectMirror.class);
private static final MethodType MIRROR_SETTER_TYPE = MethodType.methodType(Object.class, ScriptObjectMirror.class, Object.class);
// initialized by nasgen
@SuppressWarnings("unused")
private static PropertyMap $nasgenmap$;
// Not instantiable: this class only hosts @ScriptClass/@Function definitions for "Object".
private NativeObject() {
    // don't create me!
    throw new UnsupportedOperationException();
}
// Builds the ECMA TypeError raised when a value that is not an object is supplied.
private static ECMAException notAnObject(final Object obj) {
    return typeError("not.an.object", ScriptRuntime.safeToString(obj));
}
/**
 * Nashorn extension: setIndexedPropertiesToExternalArrayData
 *
 * @param self self reference
 * @param obj object whose index properties are backed by buffer
 * @param buf external buffer - should be a nio ByteBuffer
 * @return the 'obj' object
 */
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static ScriptObject setIndexedPropertiesToExternalArrayData(final Object self, final Object obj, final Object buf) {
    // Receiver must be a script object (checked first, as before), buffer must be a ByteBuffer.
    Global.checkObject(obj);
    final ScriptObject target = (ScriptObject) obj;
    if (!(buf instanceof ByteBuffer)) {
        throw typeError("not.a.bytebuffer", "setIndexedPropertiesToExternalArrayData's buf argument");
    }
    target.setArray(ArrayData.allocate((ByteBuffer) buf));
    return target;
}
/**
* ECMA 15.2.3.2 Object.getPrototypeOf ( O )
*
* @param self self reference
* @param obj object to get prototype from
* @return the prototype of an object
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static Object getPrototypeOf(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return ((ScriptObject)obj).getProto();
} else if (obj instanceof ScriptObjectMirror) {
return ((ScriptObjectMirror)obj).getProto();
} else {
final JSType type = JSType.of(obj);
if (type == JSType.OBJECT) {
// host (Java) objects have null __proto__
return null;
}
// must be some JS primitive
throw notAnObject(obj);
}
}
    /**
     * Nashorn extension: Object.setPrototypeOf ( O, proto )
     * Also found in ES6 draft specification.
     *
     * @param self self reference
     * @param obj object to set prototype for
     * @param proto prototype object to be used
     * @return object whose prototype is set
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
    public static Object setPrototypeOf(final Object self, final Object obj, final Object proto) {
        // Throws TypeError for null/undefined; other non-object coercible
        // values fall through untouched and are simply returned.
        Global.checkObjectCoercible(obj);
        if (obj instanceof ScriptObject) {
            ((ScriptObject)obj).setPrototypeOf(proto);
        } else if (obj instanceof ScriptObjectMirror) {
            ((ScriptObjectMirror)obj).setProto(proto);
        }
        return obj;
    }
/**
* ECMA 15.2.3.3 Object.getOwnPropertyDescriptor ( O, P )
*
* @param self self reference
* @param obj object from which to get property descriptor for {@code ToString(prop)}
* @param prop property descriptor
* @return property descriptor
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static Object getOwnPropertyDescriptor(final Object self, final Object obj, final Object prop) {
if (obj instanceof ScriptObject) {
final String key = JSType.toString(prop);
final ScriptObject sobj = (ScriptObject)obj;
return sobj.getOwnPropertyDescriptor(key);
} else if (obj instanceof ScriptObjectMirror) {
final String key = JSType.toString(prop);
final ScriptObjectMirror sobjMirror = (ScriptObjectMirror)obj;
return sobjMirror.getOwnPropertyDescriptor(key);
} else {
throw notAnObject(obj);
}
}
/**
* ECMA 15.2.3.4 Object.getOwnPropertyNames ( O )
*
* @param self self reference
* @param obj object to query for property names
* @return array of property names
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static ScriptObject getOwnPropertyNames(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return new NativeArray(((ScriptObject)obj).getOwnKeys(true));
} else if (obj instanceof ScriptObjectMirror) {
return new NativeArray(((ScriptObjectMirror)obj).getOwnKeys(true));
}
final var global = Global.instance();
if (global.isES6()) {
final var obj2 = JSType.toScriptObject(global, obj);
if (obj2 instanceof ScriptObject) {
return new NativeArray(((ScriptObject)obj2).getOwnKeys(true));
}
return new NativeArray();
}
throw notAnObject(obj);
}
    /**
     * ECMA6 19.1.2.8 Object.getOwnPropertySymbols ( O )
     *
     * @param self self reference
     * @param obj object to query for own symbol-keyed properties
     * @return array of the object's own property symbols
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
    public static ScriptObject getOwnPropertySymbols(final Object self, final Object obj) {
        final var obj2 = JSType.toScriptObject(obj);
        if (obj2 instanceof ScriptObject) {
            return new NativeArray(((ScriptObject)obj2).getOwnSymbols(true));
        }
        // TODO: we don't support this on ScriptObjectMirror objects yet
        return new NativeArray();
    }
/**
* ECMA 15.2.3.5 Object.create ( O [, Properties] )
*
* @param self self reference
* @param proto prototype object
* @param props properties to define
* @return object created
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static ScriptObject create(final Object self, final Object proto, final Object props) {
if (proto != null) {
Global.checkObject(proto);
}
// FIXME: should we create a proper object with correct number of
// properties?
final ScriptObject newObj = Global.newEmptyInstance();
newObj.setProto((ScriptObject)proto);
if (props != UNDEFINED) {
NativeObject.defineProperties(self, newObj, props);
}
return newObj;
}
    /**
     * ECMA 15.2.3.6 Object.defineProperty ( O, P, Attributes )
     *
     * @param self self reference
     * @param obj object in which to define a property
     * @param prop property to define
     * @param attr attributes for property descriptor
     * @return object
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
    public static ScriptObject defineProperty(final Object self, final Object obj, final Object prop, final Object attr) {
        final ScriptObject sobj = Global.checkObject(obj);
        // 'true' => strict semantics: failures throw rather than fail silently.
        sobj.defineOwnProperty(JSType.toPropertyKey(prop), attr, true);
        return sobj;
    }
    /**
     * ECMA 15.2.3.7 Object.defineProperties ( O, Properties )
     *
     * @param self self reference
     * @param obj object in which to define properties
     * @param props properties
     * @return object
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
    public static ScriptObject defineProperties(final Object self, final Object obj, final Object props) {
        final ScriptObject sobj = Global.checkObject(obj);
        final Object propsObj = Global.toObject(props);
        if (propsObj instanceof ScriptObject) {
            // Only own, enumerable keys (getOwnKeys(false)) are applied.
            final Object[] keys = ((ScriptObject)propsObj).getOwnKeys(false);
            for (final Object key : keys) {
                final String prop = JSType.toString(key);
                // 'true' => strict semantics: invalid descriptors throw.
                sobj.defineOwnProperty(prop, ((ScriptObject)propsObj).get(prop), true);
            }
        }
        return sobj;
    }
/**
* ECMA 15.2.3.8 Object.seal ( O )
*
* @param self self reference
* @param obj object to seal
* @return sealed object
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static Object seal(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return ((ScriptObject)obj).seal();
} else if (obj instanceof ScriptObjectMirror) {
return ((ScriptObjectMirror)obj).seal();
} else if (isES6()) {
return obj;
} else {
throw notAnObject(obj);
}
}
/**
* ECMA 15.2.3.9 Object.freeze ( O )
*
* @param self self reference
* @param obj object to freeze
* @return frozen object
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static Object freeze(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return ((ScriptObject)obj).freeze();
} else if (obj instanceof ScriptObjectMirror) {
return ((ScriptObjectMirror)obj).freeze();
} else if (isES6()) {
return obj;
} else {
throw notAnObject(obj);
}
}
    /** @return true when the current global was created with ES6 semantics enabled. */
    private static boolean isES6() {
        return Global.instance().isES6();
    }
/**
* ECMA 15.2.3.10 Object.preventExtensions ( O )
*
* @param self self reference
* @param obj object, for which to set the internal extensible property to false
* @return object
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static Object preventExtensions(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return ((ScriptObject)obj).preventExtensions();
} else if (obj instanceof ScriptObjectMirror) {
return ((ScriptObjectMirror)obj).preventExtensions();
} else if (isES6()) {
return obj;
} else {
throw notAnObject(obj);
}
}
/**
* ECMA 15.2.3.11 Object.isSealed ( O )
*
* @param self self reference
* @param obj check whether an object is sealed
* @return true if sealed, false otherwise
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static boolean isSealed(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return ((ScriptObject)obj).isSealed();
} else if (obj instanceof ScriptObjectMirror) {
return ((ScriptObjectMirror)obj).isSealed();
} else if (isES6()) {
return true;
} else {
throw notAnObject(obj);
}
}
/**
* ECMA 15.2.3.12 Object.isFrozen ( O )
*
* @param self self reference
* @param obj check whether an object
* @return true if object is frozen, false otherwise
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static boolean isFrozen(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return ((ScriptObject)obj).isFrozen();
} else if (obj instanceof ScriptObjectMirror) {
return ((ScriptObjectMirror)obj).isFrozen();
} else if (isES6()) {
return true;
} else {
throw notAnObject(obj);
}
}
/**
* ECMA 15.2.3.13 Object.isExtensible ( O )
*
* @param self self reference
* @param obj check whether an object is extensible
* @return true if object is extensible, false otherwise
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static boolean isExtensible(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
return ((ScriptObject)obj).isExtensible();
} else if (obj instanceof ScriptObjectMirror) {
return ((ScriptObjectMirror)obj).isExtensible();
} else if (isES6()) {
return false;
} else {
throw notAnObject(obj);
}
}
/**
* ECMA 15.2.3.14 Object.keys ( O )
*
* @param self self reference
* @param obj object from which to extract keys
* @return array of keys in object
*/
@Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
public static ScriptObject keys(final Object self, final Object obj) {
if (obj instanceof ScriptObject) {
final ScriptObject sobj = (ScriptObject)obj;
return new NativeArray(sobj.getOwnKeys(false));
} else if (obj instanceof ScriptObjectMirror) {
final ScriptObjectMirror sobjMirror = (ScriptObjectMirror)obj;
return new NativeArray(sobjMirror.getOwnKeys(false));
} else {
throw notAnObject(obj);
}
}
/**
* ECMA 15.2.2.1 , 15.2.1.1 new Object([value]) and Object([value])
*
* Constructor
*
* @param newObj is the new object instantiated with the new operator
* @param self self reference
* @param value value of object to be instantiated
* @return the new NativeObject
*/
@Constructor
public static Object construct(final boolean newObj, final Object self, final Object value) {
final JSType type = JSType.ofNoFunction(value);
// Object(null), Object(undefined), Object() are same as "new Object()"
if (newObj || type == JSType.NULL || type == JSType.UNDEFINED) {
switch (type) {
case BOOLEAN:
case NUMBER:
case STRING:
case SYMBOL:
return Global.toObject(value);
case OBJECT:
return value;
case NULL:
case UNDEFINED:
// fall through..
default:
break;
}
return Global.newEmptyInstance();
}
return Global.toObject(value);
}
    /**
     * ECMA 15.2.4.2 Object.prototype.toString ( )
     *
     * Delegates to the shared builtin formatter, which produces the
     * "[object Class]" style string.
     *
     * @param self self reference
     * @return ToString of object
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE)
    public static String toString(final Object self) {
        return ScriptRuntime.builtinObjectToString(self);
    }
    /**
     * ECMA 15.2.4.3 Object.prototype.toLocaleString ( )
     *
     * Invokes the object's own (possibly user-overridden) {@code toString}
     * via a dynamic invoker, so script-defined toString implementations win.
     *
     * @param self self reference
     * @return localized ToString
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE)
    public static Object toLocaleString(final Object self) {
        final Object obj = JSType.toScriptObject(self);
        if (obj instanceof ScriptObject) {
            final InvokeByName toStringInvoker = getTO_STRING();
            final ScriptObject sobj = (ScriptObject)obj;
            try {
                // Look up "toString" on the object, then call it if callable.
                final Object toString = toStringInvoker.getGetter().invokeExact(sobj);
                if (Bootstrap.isCallable(toString)) {
                    return toStringInvoker.getInvoker().invokeExact(toString, sobj);
                }
            } catch (final RuntimeException | Error e) {
                // unchecked: rethrow untouched
                throw e;
            } catch (final Throwable t) {
                // invokeExact is declared to throw Throwable; wrap checked ones
                throw new RuntimeException(t);
            }
            // toString existed but was not callable
            throw typeError("not.a.function", "toString");
        }
        // value could not be converted to a script object; use builtin format
        return ScriptRuntime.builtinObjectToString(self);
    }
    /**
     * ECMA 15.2.4.4 Object.prototype.valueOf ( )
     *
     * @param self self reference
     * @return value of object (ToObject of the receiver)
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE)
    public static Object valueOf(final Object self) {
        return Global.toObject(self);
    }
/**
* ECMA 15.2.4.5 Object.prototype.hasOwnProperty (V)
*
* @param self self reference
* @param v property to check for
* @return true if property exists in object
*/
@Function(attributes = Attribute.NOT_ENUMERABLE)
public static boolean hasOwnProperty(final Object self, final Object v) {
// Convert ScriptObjects to primitive with String.class hint
// but no need to convert other primitives to string.
final Object key = JSType.toPrimitive(v, String.class);
final Object obj = Global.toObject(self);
return obj instanceof ScriptObject && ((ScriptObject)obj).hasOwnProperty(key);
}
/**
* ECMA 15.2.4.6 Object.prototype.isPrototypeOf (V)
*
* @param self self reference
* @param v v prototype object to check against
* @return true if object is prototype of v
*/
@Function(attributes = Attribute.NOT_ENUMERABLE)
public static boolean isPrototypeOf(final Object self, final Object v) {
if (!(v instanceof ScriptObject)) {
return false;
}
final Object obj = Global.toObject(self);
ScriptObject proto = (ScriptObject)v;
do {
proto = proto.getProto();
if (proto == obj) {
return true;
}
} while (proto != null);
return false;
}
    /**
     * ECMA 15.2.4.7 Object.prototype.propertyIsEnumerable (V)
     *
     * @param self self reference
     * @param v property to check if enumerable
     * @return true if property is enumerable
     */
    @Function(attributes = Attribute.NOT_ENUMERABLE)
    public static boolean propertyIsEnumerable(final Object self, final Object v) {
        final String str = JSType.toString(v);
        final Object obj = Global.toObject(self);
        if (obj instanceof ScriptObject) {
            final ScriptObject sobj = (ScriptObject) obj;
            final Property property = sobj.getProperty(str);
            if (property != null) {
                return property.isEnumerable();
            } else {
                // Not in the property map: array elements are enumerable, so
                // fall back to checking the array data for a valid index.
                return (sobj.getArray().has(ArrayIndex.getArrayIndex(v)));
            }
        }
        return false;
    }
/**
* Nashorn extension: Object.bindProperties
*
* Binds the source object's properties to the target object. Binding
* properties allows two-way read/write for the properties of the source object.
*
* Example:
* <pre>
* var obj = { x: 34, y: 100 };
* var foo = {}
*
* // bind properties of "obj" to "foo" object
* Object.bindProperties(foo, obj);
*
* // now, we can access/write on 'foo' properties
* print(foo.x); // prints obj.x which is 34
*
* // update obj.x via foo.x
* foo.x = "hello";
* print(obj.x); // prints "hello" now
*
* obj.x = 42; // foo.x also becomes 42
* print(foo.x); // prints 42
* </pre>
* <p>
* The source object bound can be a ScriptObject or a ScriptOjectMirror.
* null or undefined source object results in TypeError being thrown.
* </p>
* Example:
* <pre>
* var obj = loadWithNewGlobal({
* name: "test",
* script: "obj = { x: 33, y: 'hello' }"
* });
*
* // bind 'obj's properties to global scope 'this'
* Object.bindProperties(this, obj);
* print(x); // prints 33
* print(y); // prints "hello"
* x = Math.PI; // changes obj.x to Math.PI
* print(obj.x); // prints Math.PI
* </pre>
*
* Limitations of property binding:
* <ul>
* <li> Only enumerable, immediate (not proto inherited) properties of the source object are bound.
* <li> If the target object already contains a property called "foo", the source's "foo" is skipped (not bound).
* <li> Properties added to the source object after binding to the target are not bound.
* <li> Property configuration changes on the source object (or on the target) is not propagated.
* <li> Delete of property on the target (or the source) is not propagated -
* only the property value is set to 'undefined' if the property happens to be a data property.
* </ul>
* <p>
* It is recommended that the bound properties be treated as non-configurable
* properties to avoid surprises.
* </p>
*
* @param self self reference
* @param target the target object to which the source object's properties are bound
* @param source the source object whose properties are bound to the target
* @return the target object after property binding
*/
    @Function(attributes = Attribute.NOT_ENUMERABLE, where = Where.CONSTRUCTOR)
    public static Object bindProperties(final Object self, final Object target, final Object source) {
        // target object has to be a ScriptObject
        final ScriptObject targetObj = Global.checkObject(target);
        // check null or undefined source object
        Global.checkObjectCoercible(source);
        if (source instanceof ScriptObject) {
            final ScriptObject sourceObj = (ScriptObject)source;
            final PropertyMap sourceMap = sourceObj.getMap();
            final Property[] properties = sourceMap.getProperties();
            //replace the map and blow up everything to objects to work with dual fields :-(
            // filter non-enumerable properties
            final ArrayList<Property> propList = new ArrayList<>();
            for (final Property prop : properties) {
                if (prop.isEnumerable()) {
                    // NOTE: this mutates the SOURCE object's properties in
                    // place (widens type to Object) before binding them.
                    final Object value = sourceObj.get(prop.getKey());
                    prop.setType(Object.class);
                    prop.setValue(sourceObj, sourceObj, value, false);
                    propList.add(prop);
                }
            }
            if (!propList.isEmpty()) {
                targetObj.addBoundProperties(sourceObj, propList.toArray(new Property[0]));
            }
        } else if (source instanceof ScriptObjectMirror) {
            // get enumerable, immediate properties of mirror
            final ScriptObjectMirror mirror = (ScriptObjectMirror)source;
            final String[] keys = mirror.getOwnKeys(false);
            if (keys.length == 0) {
                // nothing to bind
                return target;
            }
            // make accessor properties using dynamic invoker getters and setters
            final AccessorProperty[] props = new AccessorProperty[keys.length];
            for (int idx = 0; idx < keys.length; idx++) {
                props[idx] = createAccessorProperty(keys[idx]);
            }
            targetObj.addBoundProperties(source, props);
        } else if (source instanceof StaticClass) {
            // bind static bean properties/methods of a Java class
            final Class<?> clazz = ((StaticClass)source).getRepresentedClass();
            Bootstrap.checkReflectionAccess(clazz, true);
            bindBeanProperties(targetObj, source, BeansLinker.getReadableStaticPropertyNames(clazz),
                BeansLinker.getWritableStaticPropertyNames(clazz), BeansLinker.getStaticMethodNames(clazz));
        } else {
            // arbitrary Java instance: bind its instance bean properties/methods
            final Class<?> clazz = source.getClass();
            Bootstrap.checkReflectionAccess(clazz, false);
            bindBeanProperties(targetObj, source, BeansLinker.getReadableInstancePropertyNames(clazz),
                BeansLinker.getWritableInstancePropertyNames(clazz), BeansLinker.getInstanceMethodNames(clazz));
        }
        return target;
    }
    /**
     * Creates an accessor property whose getter/setter dynamically dispatch
     * the named property access against whatever object they are bound to.
     *
     * @param name the property name
     * @return an accessor property backed by dynamic invokers
     */
    private static AccessorProperty createAccessorProperty(final String name) {
        final MethodHandle getter = Bootstrap.createDynamicInvoker(name, NashornCallSiteDescriptor.GET_METHOD_PROPERTY, MIRROR_GETTER_TYPE);
        final MethodHandle setter = Bootstrap.createDynamicInvoker(name, NashornCallSiteDescriptor.SET_PROPERTY, MIRROR_SETTER_TYPE);
        return AccessorProperty.create(name, 0, getter, setter);
    }
/**
* Binds the source mirror object's properties to the target object. Binding
* properties allows two-way read/write for the properties of the source object.
* All inherited, enumerable properties are also bound. This method is used to
* to make 'with' statement work with ScriptObjectMirror as scope object.
*
* @param target the target object to which the source object's properties are bound
* @param source the source object whose properties are bound to the target
* @return the target object after property binding
*/
public static Object bindAllProperties(final ScriptObject target, final ScriptObjectMirror source) {
final Set<String> keys = source.keySet();
// make accessor properties using dynamic invoker getters and setters
final AccessorProperty[] props = new AccessorProperty[keys.size()];
int idx = 0;
for (final String name : keys) {
props[idx] = createAccessorProperty(name);
idx++;
}
target.addBoundProperties(source, props);
return target;
}
    /**
     * Binds the bean properties and methods of a Java object (or class, for
     * statics) to {@code targetObj} as accessor properties whose handles are
     * pre-linked against {@code source}.
     *
     * @param targetObj target script object receiving the bound properties
     * @param source the Java object (or StaticClass marker) being bound
     * @param readablePropertyNames bean properties with getters
     * @param writablePropertyNames bean properties with setters
     * @param methodNames bean method names, bound as non-writable values
     */
    private static void bindBeanProperties(final ScriptObject targetObj, final Object source,
        final Collection<String> readablePropertyNames, final Collection<String> writablePropertyNames,
        final Collection<String> methodNames) {
        final Set<String> propertyNames = new HashSet<>(readablePropertyNames);
        propertyNames.addAll(writablePropertyNames);
        final Class<?> clazz = source.getClass();
        final MethodType getterType = MethodType.methodType(Object.class, clazz);
        final MethodType setterType = MethodType.methodType(Object.class, clazz, Object.class);
        final GuardingDynamicLinker linker = Bootstrap.getBeanLinkerForClass(clazz);
        final List<AccessorProperty> properties = new ArrayList<>(propertyNames.size() + methodNames.size());
        // Methods become read-only properties whose value is the bound callable.
        for(final String methodName: methodNames) {
            final MethodHandle method;
            try {
                method = getBeanOperation(linker, GET_METHOD, methodName, getterType, source);
            } catch(final IllegalAccessError e) {
                // Presumably, this was a caller sensitive method. Ignore it and carry on.
                continue;
            }
            properties.add(AccessorProperty.create(methodName, Property.NOT_WRITABLE, getBoundBeanMethodGetter(source,
                method), Lookup.EMPTY_SETTER));
        }
        // Properties get a getter and/or setter; inaccessible sides fall back
        // to the empty handles rather than failing the whole binding.
        for(final String propertyName: propertyNames) {
            MethodHandle getter;
            if(readablePropertyNames.contains(propertyName)) {
                try {
                    getter = getBeanOperation(linker, GET_PROPERTY, propertyName, getterType, source);
                } catch(final IllegalAccessError e) {
                    // Presumably, this was a caller sensitive method. Ignore it and carry on.
                    getter = Lookup.EMPTY_GETTER;
                }
            } else {
                getter = Lookup.EMPTY_GETTER;
            }
            final boolean isWritable = writablePropertyNames.contains(propertyName);
            MethodHandle setter;
            if(isWritable) {
                try {
                    setter = getBeanOperation(linker, SET_PROPERTY, propertyName, setterType, source);
                } catch(final IllegalAccessError e) {
                    // Presumably, this was a caller sensitive method. Ignore it and carry on.
                    setter = Lookup.EMPTY_SETTER;
                }
            } else {
                setter = Lookup.EMPTY_SETTER;
            }
            // Skip properties where neither accessor could be linked.
            if(getter != Lookup.EMPTY_GETTER || setter != Lookup.EMPTY_SETTER) {
                properties.add(AccessorProperty.create(propertyName, isWritable ? 0 : Property.NOT_WRITABLE, getter, setter));
            }
        }
        targetObj.addBoundProperties(source, properties.toArray(new AccessorProperty[0]));
    }
    /**
     * Produces a getter that always returns the given bean method bound to
     * {@code source}, ignoring its receiver argument.
     *
     * @param source the Java object the method is bound to
     * @param methodGetter handle that looks the method up on {@code source}
     * @return a constant-returning getter of type (Object)Object
     */
    private static MethodHandle getBoundBeanMethodGetter(final Object source, final MethodHandle methodGetter) {
        try {
            // NOTE: we're relying on the fact that StandardOperation.GET_METHOD return value is constant for any given method
            // name and object linked with BeansLinker. (Actually, an even stronger assumption is true: return value is
            // constant for any given method name and object's class.)
            return MethodHandles.dropArguments(MethodHandles.constant(Object.class,
                Bootstrap.bindCallable(methodGetter.invoke(source), source, null)), 0, Object.class);
        } catch(RuntimeException|Error e) {
            // unchecked: rethrow untouched
            throw e;
        } catch(final Throwable t) {
            // MethodHandle.invoke declares Throwable; wrap checked ones
            throw new RuntimeException(t);
        }
    }
    /**
     * Links the named bean operation (GET_PROPERTY/SET_PROPERTY/GET_METHOD)
     * against {@code source} and returns the raw invocation handle.
     *
     * @param linker the bean linker for the source's class
     * @param operation the namespaced operation to link
     * @param name the property/method name
     * @param methodType the call site type
     * @param source the object the handle will be used against
     * @return the unguarded invocation handle
     */
    private static MethodHandle getBeanOperation(final GuardingDynamicLinker linker, final Operation operation,
        final String name, final MethodType methodType, final Object source) {
        final GuardedInvocation inv;
        try {
            inv = NashornBeansLinker.getGuardedInvocation(linker, createLinkRequest(operation.named(name), methodType, source), Bootstrap.getLinkerServices());
            assert passesGuard(source, inv.getGuard());
        } catch(RuntimeException|Error e) {
            // unchecked: rethrow untouched
            throw e;
        } catch(final Throwable t) {
            // linking machinery declares Throwable; wrap checked ones
            throw new RuntimeException(t);
        }
        assert inv.getSwitchPoints() == null; // Linkers in Dynalink's beans package don't use switchpoints.
        // We discard the guard, as all method handles will be bound to a specific object.
        return inv.getInvocation();
    }
    /** @return true when {@code guard} is absent or accepts {@code obj}; used in assertions only. */
    private static boolean passesGuard(final Object obj, final MethodHandle guard) throws Throwable {
        return guard == null || (boolean)guard.invoke(obj);
    }
    /** Builds a non-tail-call link request for the given operation against {@code source}. */
    private static LinkRequest createLinkRequest(final Operation operation, final MethodType methodType, final Object source) {
        return new SimpleLinkRequest(new CallSiteDescriptor(MethodHandles.publicLookup(), operation,
            methodType), false, source);
    }
    /** Looks up a static method on this class by name and signature. */
    private static MethodHandle findOwnMH(final String name, final Class<?> rtype, final Class<?>... types) {
        return MH.findStatic(MethodHandles.lookup(), NativeObject.class, name, MH.type(rtype, types));
    }
}
|
apache/hadoop | 35,852 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/main/java/org/apache/hadoop/yarn/client/api/YarnClient.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.client.api;
import java.io.IOException;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewReservationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationListRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationListResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationTimeoutsResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.ReservationDefinition;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceTypeInfo;
import org.apache.hadoop.yarn.api.records.ShellContainerCommand;
import org.apache.hadoop.yarn.api.records.SignalContainerCommand;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.client.api.impl.YarnClientImpl;
import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
import org.apache.hadoop.yarn.exceptions.ApplicationIdNotProvidedException;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
import org.apache.hadoop.yarn.exceptions.YARNFeatureNotEnabledException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class YarnClient extends AbstractService {
/**
* Create a new instance of YarnClient.
*/
@Public
public static YarnClient createYarnClient() {
YarnClient client = new YarnClientImpl();
return client;
}
  /**
   * Constructs a YarnClient service with the given service name.
   *
   * @param name service name passed to {@link AbstractService}
   */
  @Private
  protected YarnClient(String name) {
    super(name);
  }
  /**
   * <p>
   * Obtain a {@link YarnClientApplication} for a new application,
   * which in turn contains the {@link ApplicationSubmissionContext} and
   * {@link org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse}
   * objects.
   * </p>
   *
   * @return {@link YarnClientApplication} built for a new application
   * @throws YarnException if the request to the ResourceManager fails
   * @throws IOException on RPC or other communication errors
   */
  public abstract YarnClientApplication createApplication()
      throws YarnException, IOException;
  /**
   * <p>
   * Submit a new application to <code>YARN.</code> It is a blocking call - it
   * will not return {@link ApplicationId} until the submitted application is
   * submitted successfully and accepted by the ResourceManager.
   * </p>
   *
   * <p>
   * Users should provide an {@link ApplicationId} as part of the parameter
   * {@link ApplicationSubmissionContext} when submitting a new application,
   * otherwise it will throw the {@link ApplicationIdNotProvidedException}.
   * </p>
   *
   * <p>This internally calls {@link ApplicationClientProtocol#submitApplication
   * (SubmitApplicationRequest)}, and after that, it internally invokes
   * {@link ApplicationClientProtocol#getApplicationReport
   * (GetApplicationReportRequest)} and waits till it can make sure that the
   * application gets properly submitted. If RM fails over or RM restart
   * happens before ResourceManager saves the application's state,
   * {@link ApplicationClientProtocol
   * #getApplicationReport(GetApplicationReportRequest)} will throw
   * the {@link ApplicationNotFoundException}. This API automatically resubmits
   * the application with the same {@link ApplicationSubmissionContext} when it
   * catches the {@link ApplicationNotFoundException}</p>
   *
   * @param appContext
   *          {@link ApplicationSubmissionContext} containing all the details
   *          needed to submit a new application
   * @return {@link ApplicationId} of the accepted application
   * @throws YarnException if the submission fails or is rejected by the
   *           ResourceManager
   * @throws IOException on RPC or other communication errors
   * @see #createApplication()
   */
  public abstract ApplicationId submitApplication(
      ApplicationSubmissionContext appContext) throws YarnException,
      IOException;
  /**
   * <p>
   * Fail an application attempt identified by given ID.
   * </p>
   *
   * @param applicationAttemptId
   *          {@link ApplicationAttemptId} of the attempt to fail.
   * @throws YarnException
   *           in case of errors or if YARN rejects the request due to
   *           access-control restrictions.
   * @throws IOException on RPC or other communication errors
   * @see #getQueueAclsInfo()
   */
  public abstract void failApplicationAttempt(
      ApplicationAttemptId applicationAttemptId) throws YarnException,
      IOException;
  /**
   * <p>
   * Kill an application identified by given ID.
   * </p>
   *
   * @param applicationId
   *          {@link ApplicationId} of the application that needs to be killed
   * @throws YarnException
   *           in case of errors or if YARN rejects the request due to
   *           access-control restrictions.
   * @throws IOException on RPC or other communication errors
   * @see #getQueueAclsInfo()
   */
  public abstract void killApplication(ApplicationId applicationId) throws YarnException,
      IOException;
  /**
   * <p>
   * Kill an application identified by given ID, recording the supplied
   * diagnostics message against the application.
   * </p>
   * @param applicationId {@link ApplicationId} of the application that needs to
   *          be killed
   * @param diagnostics for killing an application.
   * @throws YarnException in case of errors or if YARN rejects the request due
   *           to access-control restrictions.
   * @throws IOException on RPC or other communication errors
   */
  public abstract void killApplication(ApplicationId applicationId,
      String diagnostics) throws YarnException, IOException;
/**
 * <p>
 * Get a report of the given Application.
 * </p>
 *
 * <p>
 * In secure mode, <code>YARN</code> verifies access to the application, queue
 * etc. before accepting the request.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access then the following
 * fields in the report will be set to stubbed values:
 * <ul>
 * <li>host - set to "N/A"</li>
 * <li>RPC port - set to -1</li>
 * <li>client token - set to "N/A"</li>
 * <li>diagnostics - set to "N/A"</li>
 * <li>tracking URL - set to "N/A"</li>
 * <li>original tracking URL - set to "N/A"</li>
 * <li>resource usage report - all values are -1</li>
 * </ul>
 * </p>
 *
 * @param appId
 *          {@link ApplicationId} of the application that needs a report
 * @return application report
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract ApplicationReport getApplicationReport(ApplicationId appId)
    throws YarnException, IOException;
/**
 * Get the AMRM token of the application.
 * <p>
 * The AMRM token is required for AM to RM scheduling operations. For
 * managed Application Masters YARN takes care of injecting it. For unmanaged
 * Applications Masters, the token must be obtained via this method and set
 * in the {@link org.apache.hadoop.security.UserGroupInformation} of the
 * current user.
 * <p>
 * The AMRM token will be returned only if all the following conditions are
 * met:
 * <ul>
 * <li>the requester is the owner of the ApplicationMaster</li>
 * <li>the application master is an unmanaged ApplicationMaster</li>
 * <li>the application master is in ACCEPTED state</li>
 * </ul>
 * Else this method returns NULL.
 *
 * @param appId {@link ApplicationId} of the application to get the AMRM token
 * @return the AMRM token if available
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract org.apache.hadoop.security.token.Token<AMRMTokenIdentifier>
    getAMRMToken(ApplicationId appId) throws YarnException, IOException;
/**
 * <p>
 * Get a report (ApplicationReport) of all Applications in the cluster.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access for an application
 * then the corresponding report will be filtered as described in
 * {@link #getApplicationReport(ApplicationId)}.
 * </p>
 *
 * @return a list of reports of all running applications
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ApplicationReport> getApplications()
    throws YarnException, IOException;
/**
 * <p>
 * Get a report (ApplicationReport) of Applications
 * matching the given application types in the cluster.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access for an application
 * then the corresponding report will be filtered as described in
 * {@link #getApplicationReport(ApplicationId)}.
 * </p>
 *
 * @param applicationTypes set of application types you are interested in
 * @return a list of reports of applications
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ApplicationReport> getApplications(
    Set<String> applicationTypes) throws YarnException, IOException;
/**
 * <p>
 * Get a report (ApplicationReport) of Applications matching the given
 * application states in the cluster.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access for an application
 * then the corresponding report will be filtered as described in
 * {@link #getApplicationReport(ApplicationId)}.
 * </p>
 *
 * @param applicationStates set of application states you are interested in
 * @return a list of reports of applications
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ApplicationReport>
    getApplications(EnumSet<YarnApplicationState> applicationStates)
        throws YarnException, IOException;
/**
 * <p>
 * Get a report (ApplicationReport) of Applications matching the given
 * application types and application states in the cluster.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access for an application
 * then the corresponding report will be filtered as described in
 * {@link #getApplicationReport(ApplicationId)}.
 * </p>
 *
 * @param applicationTypes set of application types you are interested in
 * @param applicationStates set of application states you are interested in
 * @return a list of reports of applications
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ApplicationReport> getApplications(
    Set<String> applicationTypes,
    EnumSet<YarnApplicationState> applicationStates) throws YarnException,
    IOException;
/**
 * <p>
 * Get a report (ApplicationReport) of Applications matching the given
 * application types, application states and application tags in the cluster.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access for an application
 * then the corresponding report will be filtered as described in
 * {@link #getApplicationReport(ApplicationId)}.
 * </p>
 *
 * @param applicationTypes set of application types you are interested in
 * @param applicationStates set of application states you are interested in
 * @param applicationTags set of application tags you are interested in
 * @return a list of reports of applications
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ApplicationReport> getApplications(
    Set<String> applicationTypes,
    EnumSet<YarnApplicationState> applicationStates,
    Set<String> applicationTags) throws YarnException,
    IOException;
/**
 * <p>
 * Get a report (ApplicationReport) of Applications matching the given users,
 * queues, application types and application states in the cluster. If any of
 * the params is set to null, it is not used when filtering.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access for an application
 * then the corresponding report will be filtered as described in
 * {@link #getApplicationReport(ApplicationId)}.
 * </p>
 *
 * @param queues set of queues you are interested in
 * @param users set of users you are interested in
 * @param applicationTypes set of application types you are interested in
 * @param applicationStates set of application states you are interested in
 * @return a list of reports of applications
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ApplicationReport> getApplications(Set<String> queues,
    Set<String> users, Set<String> applicationTypes,
    EnumSet<YarnApplicationState> applicationStates) throws YarnException,
    IOException;
/**
 * <p>
 * Get a list of ApplicationReports that match the given
 * {@link GetApplicationsRequest}.
 * </p>
 *
 * <p>
 * If the user does not have <code>VIEW_APP</code> access for an application
 * then the corresponding report will be filtered as described in
 * {@link #getApplicationReport(ApplicationId)}.
 * </p>
 *
 * <p>
 * This base implementation always throws
 * {@link UnsupportedOperationException}; concrete sub-classes are expected
 * to override it.
 * </p>
 *
 * @param request the request object to get the list of applications.
 * @return The list of ApplicationReports that match the request
 * @throws YarnException Exception specific to YARN.
 * @throws IOException Exception mostly related to connection errors.
 */
public List<ApplicationReport> getApplications(GetApplicationsRequest request)
    throws YarnException, IOException {
  // Build the diagnostic message first, then raise the error.
  String message = "The sub-class extending " + YarnClient.class.getName()
      + " is expected to implement this !";
  throw new UnsupportedOperationException(message);
}
/**
 * <p>
 * Get metrics ({@link YarnClusterMetrics}) about the cluster.
 * </p>
 *
 * @return cluster metrics
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract YarnClusterMetrics getYarnClusterMetrics() throws YarnException,
    IOException;
/**
 * <p>
 * Get a report of nodes ({@link NodeReport}) in the cluster.
 * </p>
 *
 * @param states The {@link NodeState}s to filter on. If no filter states are
 *          given, nodes in all states will be returned.
 * @return A list of node reports
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<NodeReport> getNodeReports(NodeState... states)
    throws YarnException, IOException;
/**
 * <p>
 * Get a delegation token so as to be able to talk to YARN using those tokens.
 * </p>
 *
 * @param renewer
 *          Address of the renewer who can renew these tokens when needed by
 *          securely talking to YARN.
 * @return a delegation token ({@link Token}) that can be used to
 *         talk to YARN
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract Token getRMDelegationToken(Text renewer)
    throws YarnException, IOException;
/**
 * <p>
 * Get information ({@link QueueInfo}) about a given <em>queue</em>.
 * </p>
 *
 * @param queueName
 *          Name of the queue whose information is needed
 * @return queue information
 * @throws YarnException
 *           in case of errors or if YARN rejects the request due to
 *           access-control restrictions.
 * @throws IOException I/O exception has occurred.
 */
public abstract QueueInfo getQueueInfo(String queueName) throws YarnException,
    IOException;
/**
 * <p>
 * Get information ({@link QueueInfo}) about a given <em>queue</em> in the
 * given sub-cluster.
 * </p>
 *
 * @param queueName
 *          Name of the queue whose information is needed.
 * @param subClusterId sub-cluster Id.
 * @return queue information.
 * @throws YarnException
 *           in case of errors or if YARN rejects the request due to
 *           access-control restrictions.
 * @throws IOException I/O exception has occurred.
 */
public abstract QueueInfo getQueueInfo(String queueName, String subClusterId)
    throws YarnException, IOException;
/**
 * <p>
 * Get information ({@link QueueInfo}) about all queues, recursively if there
 * is a hierarchy
 * </p>
 *
 * @return a list of queue-information for all queues
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<QueueInfo> getAllQueues() throws YarnException, IOException;
/**
 * <p>
 * Get information ({@link QueueInfo}) about top level queues.
 * </p>
 *
 * @return a list of queue-information for all the top-level queues
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<QueueInfo> getRootQueueInfos() throws YarnException, IOException;
/**
 * <p>
 * Get information ({@link QueueInfo}) about all the immediate children queues
 * of the given queue
 * </p>
 *
 * @param parent
 *          Name of the queue whose child-queues' information is needed
 * @return a list of queue-information for all queues who are direct children
 *         of the given parent queue.
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<QueueInfo> getChildQueueInfos(String parent) throws YarnException,
    IOException;
/**
 * <p>
 * Get information about <em>acls</em> for <em>current user</em> on all the
 * existing queues.
 * </p>
 *
 * @return a list of queue acls ({@link QueueUserACLInfo}) for
 *         <em>current user</em>
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<QueueUserACLInfo> getQueueAclsInfo() throws YarnException,
    IOException;
/**
 * <p>
 * Get a report of the given ApplicationAttempt.
 * </p>
 *
 * <p>
 * In secure mode, <code>YARN</code> verifies access to the application, queue
 * etc. before accepting the request.
 * </p>
 *
 * @param applicationAttemptId
 *          {@link ApplicationAttemptId} of the application attempt that needs
 *          a report
 * @return application attempt report
 * @throws YarnException in case of errors inside YARN.
 * @throws ApplicationAttemptNotFoundException if application attempt
 *           not found
 * @throws IOException if an I/O error occurs.
 */
public abstract ApplicationAttemptReport getApplicationAttemptReport(
    ApplicationAttemptId applicationAttemptId) throws YarnException, IOException;
/**
 * <p>
 * Get a report of all (ApplicationAttempts) of Application in the cluster.
 * </p>
 *
 * @param applicationId application id of the app
 * @return a list of reports for all application attempts for specified
 *         application.
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ApplicationAttemptReport> getApplicationAttempts(
    ApplicationId applicationId) throws YarnException, IOException;
/**
 * <p>
 * Get a report of the given Container.
 * </p>
 *
 * <p>
 * In secure mode, <code>YARN</code> verifies access to the application, queue
 * etc. before accepting the request.
 * </p>
 *
 * @param containerId
 *          {@link ContainerId} of the container that needs a report
 * @return container report
 * @throws YarnException in case of errors inside YARN.
 * @throws ContainerNotFoundException if container not found.
 * @throws IOException if an I/O error occurs.
 */
public abstract ContainerReport getContainerReport(ContainerId containerId)
    throws YarnException, IOException;
/**
 * <p>
 * Get a report of all (Containers) of ApplicationAttempt in the cluster.
 * </p>
 *
 * @param applicationAttemptId application attempt id
 * @return a list of reports of all containers for specified application
 *         attempts
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract List<ContainerReport> getContainers(
    ApplicationAttemptId applicationAttemptId) throws YarnException,
    IOException;
/**
 * <p>
 * Attempts to move the given application to the given queue.
 * </p>
 *
 * @param appId
 *          Application to move.
 * @param queue
 *          Queue to place it in to.
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract void moveApplicationAcrossQueues(ApplicationId appId,
    String queue) throws YarnException, IOException;
/**
 * <p>
 * Obtain a {@link GetNewReservationResponse} for a new reservation,
 * which contains the {@link ReservationId} object.
 * </p>
 *
 * @return The {@link GetNewReservationResponse} containing a new
 *         {@link ReservationId} object.
 * @throws YarnException if reservation cannot be created.
 * @throws IOException if reservation cannot be created.
 */
public abstract GetNewReservationResponse createReservation()
    throws YarnException, IOException;
/**
 * <p>
 * The interface used by clients to submit a new reservation to the
 * {@code ResourceManager}.
 * </p>
 *
 * <p>
 * The client packages all details of its request in a
 * {@link ReservationSubmissionRequest} object. This contains information
 * about the amount of capacity, temporal constraints, and gang needs.
 * Furthermore, the reservation might be composed of multiple stages, with
 * ordering dependencies among them.
 * </p>
 *
 * <p>
 * In order to respond, a new admission control component in the
 * {@code ResourceManager} performs an analysis of the resources that have
 * been committed over the period of time the user is requesting, verify that
 * the user requests can be fulfilled, and that it respect a sharing policy
 * (e.g., {@code CapacityOverTimePolicy}). Once it has positively determined
 * that the ReservationRequest is satisfiable the {@code ResourceManager}
 * answers with a {@link ReservationSubmissionResponse} that includes a
 * {@link ReservationId}. Upon failure to find a valid allocation the response
 * is an exception with the message detailing the reason of failure.
 * </p>
 *
 * <p>
 * The semantics guarantees that the {@link ReservationId} returned,
 * corresponds to a valid reservation existing in the time-range request by
 * the user. The amount of capacity dedicated to such reservation can vary
 * overtime, depending of the allocation that has been determined. But it is
 * guaranteed to satisfy all the constraint expressed by the user in the
 * {@link ReservationDefinition}
 * </p>
 *
 * @param request request to submit a new Reservation
 * @return response contains the {@link ReservationId} on accepting the
 *         submission
 * @throws YarnException if the reservation cannot be created successfully
 * @throws IOException if an I/O error occurs.
 *
 */
@Public
@Unstable
public abstract ReservationSubmissionResponse submitReservation(
    ReservationSubmissionRequest request) throws YarnException, IOException;
/**
 * <p>
 * The interface used by clients to update an existing Reservation. This is
 * referred to as a re-negotiation process, in which a user that has
 * previously submitted a Reservation can modify its definition.
 * </p>
 *
 * <p>
 * The allocation is attempted by virtually substituting all previous
 * allocations related to this Reservation with new ones, that satisfy the new
 * {@link ReservationDefinition}. Upon success the previous allocation is
 * atomically substituted by the new one, and on failure (i.e., if the system
 * cannot find a valid allocation for the updated request), the previous
 * allocation remains valid.
 * </p>
 *
 * @param request to update an existing Reservation (the
 *          {@link ReservationUpdateRequest} should refer to an existing valid
 *          {@link ReservationId})
 * @return response empty on successfully updating the existing reservation
 * @throws YarnException if the request is invalid or reservation cannot be
 *           updated successfully
 * @throws IOException if an I/O error occurs.
 *
 */
@Public
@Unstable
public abstract ReservationUpdateResponse updateReservation(
    ReservationUpdateRequest request) throws YarnException, IOException;
/**
 * <p>
 * The interface used by clients to remove an existing Reservation.
 * </p>
 *
 * @param request to remove an existing Reservation (the
 *          {@link ReservationDeleteRequest} should refer to an existing valid
 *          {@link ReservationId})
 * @return response empty on successfully deleting the existing reservation
 * @throws YarnException if the request is invalid or reservation cannot be
 *           deleted successfully
 * @throws IOException if an I/O error occurs.
 *
 */
@Public
@Unstable
public abstract ReservationDeleteResponse deleteReservation(
    ReservationDeleteRequest request) throws YarnException, IOException;
/**
 * <p>
 * The interface used by clients to get the list of reservations in a plan.
 * The reservationId will be used to search for reservations to list if it is
 * provided. Otherwise, it will select active reservations within the
 * startTime and endTime (inclusive).
 * </p>
 *
 * @param request to list reservations in a plan. Contains fields to select
 *          String queue, ReservationId reservationId, long startTime,
 *          long endTime, and a boolean includeReservationAllocations.
 *
 *          queue: Required. Cannot be null or empty. Refers to the
 *          reservable queue in the scheduler that was selected when
 *          creating a reservation submission
 *          {@link ReservationSubmissionRequest}.
 *
 *          reservationId: Optional. If provided, other fields will
 *          be ignored.
 *
 *          startTime: Optional. If provided, only reservations that
 *          end after the startTime will be selected. This defaults
 *          to 0 if an invalid number is used.
 *
 *          endTime: Optional. If provided, only reservations that
 *          start on or before endTime will be selected. This defaults
 *          to Long.MAX_VALUE if an invalid number is used.
 *
 *          includeReservationAllocations: Optional. Flag that
 *          determines whether the entire reservation allocations are
 *          to be returned. Reservation allocations are subject to
 *          change in the event of re-planning as described by
 *          {@link ReservationDefinition}.
 *
 * @return response that contains information about reservations that are
 *         being searched for.
 * @throws YarnException if the request is invalid
 * @throws IOException if the request failed otherwise
 *
 */
@Public
@Unstable
public abstract ReservationListResponse listReservations(
    ReservationListRequest request) throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get node to labels mappings in existing cluster
 * </p>
 *
 * @return node to labels mappings
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
@Public
@Unstable
public abstract Map<NodeId, Set<String>> getNodeToLabels()
    throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get labels to nodes mapping
 * in existing cluster
 * </p>
 *
 * @return labels to nodes mappings
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
@Public
@Unstable
public abstract Map<String, Set<NodeId>> getLabelsToNodes()
    throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get labels to nodes mapping
 * for specified labels in existing cluster
 * </p>
 *
 * @param labels labels for which labels to nodes mapping has to be retrieved
 * @return labels to nodes mappings for specific labels
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
@Public
@Unstable
public abstract Map<String, Set<NodeId>> getLabelsToNodes(
    Set<String> labels) throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get node labels in the cluster
 * </p>
 *
 * @return cluster node labels collection
 * @throws YarnException when there is a failure in
 *           {@link ApplicationClientProtocol}
 * @throws IOException when there is a failure in
 *           {@link ApplicationClientProtocol}
 */
@Public
@Unstable
public abstract List<NodeLabel> getClusterNodeLabels()
    throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to set priority of an application
 * </p>
 * @param applicationId {@link ApplicationId} of the application whose
 *          priority is to be updated.
 * @param priority new priority to be set for the application.
 * @return updated priority of an application.
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
@Public
@Unstable
public abstract Priority updateApplicationPriority(
    ApplicationId applicationId,
    Priority priority) throws YarnException, IOException;
/**
 * <p>
 * Signal a container identified by given ID.
 * </p>
 *
 * @param containerId
 *          {@link ContainerId} of the container that needs to be signaled
 * @param command the signal container command
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
public abstract void signalToContainer(ContainerId containerId,
    SignalContainerCommand command) throws YarnException, IOException;
/**
 * <p>
 * Update timeouts of an application. This base implementation always throws
 * {@link UnsupportedOperationException}; concrete sub-classes are expected
 * to override it.
 * </p>
 *
 * @param request the {@link UpdateApplicationTimeoutsRequest} describing the
 *          application and the timeout values to apply
 * @return response on successfully updating the application timeouts
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 * @throws UnsupportedOperationException if the sub-class has not overridden
 *           this method.
 */
@Public
@Unstable
public UpdateApplicationTimeoutsResponse updateApplicationTimeouts(
    UpdateApplicationTimeoutsRequest request)
    throws YarnException, IOException {
  throw new UnsupportedOperationException("The sub-class extending "
      + YarnClient.class.getName() + " is expected to implement this !");
}
/**
 * <p>
 * Get the resource profiles available in the RM.
 * </p>
 * @return a Map of the resource profile names to their capabilities
 * @throws YARNFeatureNotEnabledException if resource-profile is disabled
 * @throws YarnException if any error happens inside YARN
 * @throws IOException in case of other errors
 */
@Public
@Unstable
public abstract Map<String, Resource> getResourceProfiles()
    throws YarnException, IOException;
/**
 * <p>
 * Get the details of a specific resource profile from the RM.
 * </p>
 * @param profile the profile name
 * @return resource profile name with its capabilities
 * @throws YARNFeatureNotEnabledException if resource-profile is disabled
 * @throws YarnException if any error happens inside YARN
 * @throws IOException in case of other errors
 */
@Public
@Unstable
public abstract Resource getResourceProfile(String profile)
    throws YarnException, IOException;
/**
 * <p>
 * Get available resource types supported by RM.
 * </p>
 * @return list of supported resource types with detailed information
 * @throws YarnException if any issue happens inside YARN
 * @throws IOException in case of other errors
 */
@Public
@Unstable
public abstract List<ResourceTypeInfo> getResourceTypeInfo()
    throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get node attributes in the cluster.
 * </p>
 *
 * @return cluster node attributes collection
 * @throws YarnException when there is a failure in
 *           {@link ApplicationClientProtocol}
 * @throws IOException when there is a failure in
 *           {@link ApplicationClientProtocol}
 */
@Public
@Unstable
public abstract Set<NodeAttributeInfo> getClusterAttributes()
    throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get mapping of AttributeKey to associated
 * NodeToAttributeValue list for specified node attributeKeys in the cluster.
 * </p>
 *
 * @param attributes AttributeKeys for which associated NodeToAttributeValue
 *          mapping value has to be retrieved. If empty or null is set then
 *          will return mapping for all attributeKeys in the cluster
 * @return mapping of AttributeKey to List of associated
 *         NodeToAttributeValue's.
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
@Public
@Unstable
public abstract Map<NodeAttributeKey,
    List<NodeToAttributeValue>> getAttributesToNodes(
    Set<NodeAttributeKey> attributes) throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get all node to attribute mapping in
 * existing cluster.
 * </p>
 *
 * @param hostNames HostNames for which host to attributes mapping has to
 *          be retrieved. If empty or null is set then will return
 *          all nodes to attributes mapping in cluster.
 * @return Node to attribute mappings
 * @throws YarnException in case of errors inside YARN.
 * @throws IOException if an I/O error occurs.
 */
@Public
@Unstable
public abstract Map<String, Set<NodeAttribute>> getNodeToAttributes(
    Set<String> hostNames) throws YarnException, IOException;
/**
 * <p>
 * The interface used by client to get a shell to a container.
 * </p>
 *
 * @param containerId Container ID
 * @param command Shell type
 * @throws IOException if connection fails.
 */
@Public
@Unstable
public abstract void shellToContainer(ContainerId containerId,
    ShellContainerCommand command) throws IOException;
}
|
googleapis/google-cloud-java | 35,670 | java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/ListDocumentsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2/document.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2;
/**
*
*
* <pre>
* Response message for
* [Documents.ListDocuments][google.cloud.dialogflow.v2.Documents.ListDocuments].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListDocumentsResponse}
*/
public final class ListDocumentsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ListDocumentsResponse)
ListDocumentsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDocumentsResponse.newBuilder() to construct.
// Builder-based constructor; not called directly by user code.
private ListDocumentsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used for the default instance; initializes all fields
// to their proto3 defaults (empty list, empty string).
private ListDocumentsResponse() {
  documents_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
// Creates a fresh empty instance; invoked reflectively by the protobuf runtime.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListDocumentsResponse();
}
// Returns the proto descriptor for this message type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dialogflow.v2.DocumentProto
      .internal_static_google_cloud_dialogflow_v2_ListDocumentsResponse_descriptor;
}
// Wires the generated field accessors to this message and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.dialogflow.v2.DocumentProto
      .internal_static_google_cloud_dialogflow_v2_ListDocumentsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dialogflow.v2.ListDocumentsResponse.class,
          com.google.cloud.dialogflow.v2.ListDocumentsResponse.Builder.class);
}
// Protobuf field number of the repeated `documents` field.
public static final int DOCUMENTS_FIELD_NUMBER = 1;
// Backing list for the repeated `documents` field.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.v2.Document> documents_;
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 *
 * @return the (unmodified) backing list of documents.
 */
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.v2.Document> getDocumentsList() {
  return documents_;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 *
 * @return the documents as a list of read-only {@code DocumentOrBuilder} views.
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.v2.DocumentOrBuilder>
    getDocumentsOrBuilderList() {
  return documents_;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 *
 * @return the number of documents in this response.
 */
@java.lang.Override
public int getDocumentsCount() {
  return documents_.size();
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 *
 * @param index zero-based position in the documents list.
 * @return the document at the given index.
 */
@java.lang.Override
public com.google.cloud.dialogflow.v2.Document getDocuments(int index) {
  return documents_.get(index);
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 *
 * @param index zero-based position in the documents list.
 * @return a read-only {@code DocumentOrBuilder} view of the document at the given index.
 */
@java.lang.Override
public com.google.cloud.dialogflow.v2.DocumentOrBuilder getDocumentsOrBuilder(int index) {
  return documents_.get(index);
}
// Protobuf field number of the `next_page_token` field.
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
// Holds either a String or a ByteString; lazily converted on access and cached.
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First String access: decode the ByteString form and cache the result.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    // First bytes access: encode the String form and cache the result.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
private byte memoizedIsInitialized = -1;
// Reports whether the message is fully initialized, memoizing the answer
// in memoizedIsInitialized (1 = true, 0 = false, -1 = not yet computed).
@java.lang.Override
public final boolean isInitialized() {
  final byte cached = memoizedIsInitialized;
  if (cached == 1) {
    return true;
  }
  if (cached == 0) {
    return false;
  }
  // Nothing to validate for this message; record and report success.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes this message to the given output stream in proto wire format.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: each document message.
  for (int i = 0; i < documents_.size(); i++) {
    output.writeMessage(1, documents_.get(i));
  }
  // Field 2: next_page_token, omitted when empty.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Write out any fields that were unknown at parse time.
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the wire-format size in bytes.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;  // already computed

  size = 0;
  for (int i = 0; i < documents_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, documents_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Structural equality: all fields, including unknown fields, must match.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // An object is always equal to itself.
  if (this == obj) {
    return true;
  }
  // Delegate non-ListDocumentsResponse instances to the superclass.
  if (!(obj instanceof com.google.cloud.dialogflow.v2.ListDocumentsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.dialogflow.v2.ListDocumentsResponse that =
      (com.google.cloud.dialogflow.v2.ListDocumentsResponse) obj;
  return getDocumentsList().equals(that.getDocumentsList())
      && getNextPageToken().equals(that.getNextPageToken())
      && getUnknownFields().equals(that.getUnknownFields());
}
// Hash code consistent with equals(); memoized in memoizedHashCode.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;  // already computed
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Mix in the documents field only when present, mirroring equals().
  if (getDocumentsCount() > 0) {
    hash = (37 * hash) + DOCUMENTS_FIELD_NUMBER;
    hash = (53 * hash) + getDocumentsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
/** Parses a {@code ListDocumentsResponse} from the given {@link java.nio.ByteBuffer}. */
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

/** Creates a fresh builder seeded from the (empty) default instance. */
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

/** Creates a builder pre-populated with {@code prototype}'s field values. */
public static Builder newBuilder(com.google.cloud.dialogflow.v2.ListDocumentsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance maps to an empty builder; any other instance is merged in.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  // Parented builders notify the enclosing message builder on change.
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Response message for
* [Documents.ListDocuments][google.cloud.dialogflow.v2.Documents.ListDocuments].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2.ListDocumentsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ListDocumentsResponse)
com.google.cloud.dialogflow.v2.ListDocumentsResponseOrBuilder {
/** Returns the proto descriptor for ListDocumentsResponse (shared with the message class). */
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.dialogflow.v2.DocumentProto
      .internal_static_google_cloud_dialogflow_v2_ListDocumentsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Maps descriptor fields to the generated Java accessors for reflection support.
  return com.google.cloud.dialogflow.v2.DocumentProto
      .internal_static_google_cloud_dialogflow_v2_ListDocumentsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.dialogflow.v2.ListDocumentsResponse.class,
          com.google.cloud.dialogflow.v2.ListDocumentsResponse.Builder.class);
}

// Construct using com.google.cloud.dialogflow.v2.ListDocumentsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

@java.lang.Override
public Builder clear() {
  super.clear();
  // Reset all has-bits; bit 0x1 = documents list is mutable, bit 0x2 = next_page_token set.
  bitField0_ = 0;
  if (documentsBuilder_ == null) {
    // Inline-list mode: drop back to the shared immutable empty list.
    documents_ = java.util.Collections.emptyList();
  } else {
    // Builder mode: the nested field builder owns the data; the inline list is unused.
    documents_ = null;
    documentsBuilder_.clear();
  }
  // Generated redundancy: bit 0x1 was already cleared above; harmless.
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.dialogflow.v2.DocumentProto
      .internal_static_google_cloud_dialogflow_v2_ListDocumentsResponse_descriptor;
}

@java.lang.Override
public com.google.cloud.dialogflow.v2.ListDocumentsResponse getDefaultInstanceForType() {
  return com.google.cloud.dialogflow.v2.ListDocumentsResponse.getDefaultInstance();
}

@java.lang.Override
public com.google.cloud.dialogflow.v2.ListDocumentsResponse build() {
  // build() = buildPartial() + required-field check (proto3: always initialized).
  com.google.cloud.dialogflow.v2.ListDocumentsResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.dialogflow.v2.ListDocumentsResponse buildPartial() {
  com.google.cloud.dialogflow.v2.ListDocumentsResponse result =
      new com.google.cloud.dialogflow.v2.ListDocumentsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  // Marks the builder clean and detaches it from the built message.
  onBuilt();
  return result;
}

// Transfers the repeated `documents` field into the result message.
private void buildPartialRepeatedFields(
    com.google.cloud.dialogflow.v2.ListDocumentsResponse result) {
  if (documentsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      // Freeze the mutable inline list so the message stays immutable, and drop
      // the mutability bit so future edits re-copy it (copy-on-write).
      documents_ = java.util.Collections.unmodifiableList(documents_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.documents_ = documents_;
  } else {
    result.documents_ = documentsBuilder_.build();
  }
}

// Transfers singular fields guarded by has-bits into the result message.
private void buildPartial0(com.google.cloud.dialogflow.v2.ListDocumentsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
// Reflective field-manipulation overrides. These exist so the generated builder
// keeps the covariant `Builder` return type; each simply delegates to the
// GeneratedMessageV3.Builder implementation.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for same-type merges; otherwise a reflective field-by-field merge.
  if (other instanceof com.google.cloud.dialogflow.v2.ListDocumentsResponse) {
    return mergeFrom((com.google.cloud.dialogflow.v2.ListDocumentsResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

/** Merges {@code other}'s set fields into this builder (repeated fields are appended). */
public Builder mergeFrom(com.google.cloud.dialogflow.v2.ListDocumentsResponse other) {
  if (other == com.google.cloud.dialogflow.v2.ListDocumentsResponse.getDefaultInstance())
    return this;
  if (documentsBuilder_ == null) {
    // Inline-list mode.
    if (!other.documents_.isEmpty()) {
      if (documents_.isEmpty()) {
        // Adopt other's immutable list directly; clear the mutability bit so it
        // is copied before any local modification.
        documents_ = other.documents_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureDocumentsIsMutable();
        documents_.addAll(other.documents_);
      }
      onChanged();
    }
  } else {
    // Field-builder mode.
    if (!other.documents_.isEmpty()) {
      if (documentsBuilder_.isEmpty()) {
        // Cheaper to discard the empty builder and share other's list; the
        // builder is recreated lazily (or eagerly when alwaysUseFieldBuilders).
        documentsBuilder_.dispose();
        documentsBuilder_ = null;
        documents_ = other.documents_;
        bitField0_ = (bitField0_ & ~0x00000001);
        documentsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getDocumentsFieldBuilder()
                : null;
      } else {
        documentsBuilder_.addAllMessages(other.documents_);
      }
    }
  }
  // Singular string: only a non-empty value overwrites (proto3 merge semantics).
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  // proto3 message with no required fields: always initialized.
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    // Tag-dispatch loop over the wire stream until EOF (tag 0) or an end-group tag.
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: // field 1, wire type 2: repeated Document documents
          {
            com.google.cloud.dialogflow.v2.Document m =
                input.readMessage(
                    com.google.cloud.dialogflow.v2.Document.parser(), extensionRegistry);
            if (documentsBuilder_ == null) {
              ensureDocumentsIsMutable();
              documents_.add(m);
            } else {
              documentsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18: // field 2, wire type 2: string next_page_token (must be valid UTF-8)
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Preserve unrecognized fields; false means an end-group tag was hit.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Fields consumed before a failure are kept; notify listeners either way.
    onChanged();
  } // finally
  return this;
}
// Has-bits: 0x1 = documents_ list is locally mutable, 0x2 = next_page_token set.
private int bitField0_;

// Inline storage for `documents` while no field builder exists; starts as the
// shared immutable empty list and is copied on first write.
private java.util.List<com.google.cloud.dialogflow.v2.Document> documents_ =
    java.util.Collections.emptyList();

// Copy-on-write: replace a shared/immutable list with a private ArrayList copy
// before the first mutation.
private void ensureDocumentsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    documents_ = new java.util.ArrayList<com.google.cloud.dialogflow.v2.Document>(documents_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created nested builder for `documents`; while null, documents_ above is
// authoritative. Once created (see getDocumentsFieldBuilder), it takes over.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.dialogflow.v2.Document,
        com.google.cloud.dialogflow.v2.Document.Builder,
        com.google.cloud.dialogflow.v2.DocumentOrBuilder>
    documentsBuilder_;
// ---------------------------------------------------------------------------
// Accessors for `repeated .google.cloud.dialogflow.v2.Document documents = 1`.
// Every method follows the same two-mode pattern: while documentsBuilder_ is
// null, operate on the inline documents_ list (copy-on-write); once the field
// builder exists, delegate to it.
// ---------------------------------------------------------------------------
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public java.util.List<com.google.cloud.dialogflow.v2.Document> getDocumentsList() {
  if (documentsBuilder_ == null) {
    // Read-only view so callers cannot mutate builder state behind its back.
    return java.util.Collections.unmodifiableList(documents_);
  } else {
    return documentsBuilder_.getMessageList();
  }
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public int getDocumentsCount() {
  if (documentsBuilder_ == null) {
    return documents_.size();
  } else {
    return documentsBuilder_.getCount();
  }
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public com.google.cloud.dialogflow.v2.Document getDocuments(int index) {
  if (documentsBuilder_ == null) {
    return documents_.get(index);
  } else {
    return documentsBuilder_.getMessage(index);
  }
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder setDocuments(int index, com.google.cloud.dialogflow.v2.Document value) {
  if (documentsBuilder_ == null) {
    // Protobuf fields are null-hostile; reject before mutating anything.
    if (value == null) {
      throw new NullPointerException();
    }
    ensureDocumentsIsMutable();
    documents_.set(index, value);
    onChanged();
  } else {
    documentsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder setDocuments(
    int index, com.google.cloud.dialogflow.v2.Document.Builder builderForValue) {
  if (documentsBuilder_ == null) {
    ensureDocumentsIsMutable();
    documents_.set(index, builderForValue.build());
    onChanged();
  } else {
    documentsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder addDocuments(com.google.cloud.dialogflow.v2.Document value) {
  if (documentsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureDocumentsIsMutable();
    documents_.add(value);
    onChanged();
  } else {
    documentsBuilder_.addMessage(value);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder addDocuments(int index, com.google.cloud.dialogflow.v2.Document value) {
  if (documentsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureDocumentsIsMutable();
    documents_.add(index, value);
    onChanged();
  } else {
    documentsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder addDocuments(com.google.cloud.dialogflow.v2.Document.Builder builderForValue) {
  if (documentsBuilder_ == null) {
    ensureDocumentsIsMutable();
    documents_.add(builderForValue.build());
    onChanged();
  } else {
    documentsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder addDocuments(
    int index, com.google.cloud.dialogflow.v2.Document.Builder builderForValue) {
  if (documentsBuilder_ == null) {
    ensureDocumentsIsMutable();
    documents_.add(index, builderForValue.build());
    onChanged();
  } else {
    documentsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder addAllDocuments(
    java.lang.Iterable<? extends com.google.cloud.dialogflow.v2.Document> values) {
  if (documentsBuilder_ == null) {
    ensureDocumentsIsMutable();
    // Bulk add with per-element null checks handled by the protobuf helper.
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, documents_);
    onChanged();
  } else {
    documentsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder clearDocuments() {
  if (documentsBuilder_ == null) {
    documents_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    documentsBuilder_.clear();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public Builder removeDocuments(int index) {
  if (documentsBuilder_ == null) {
    ensureDocumentsIsMutable();
    documents_.remove(index);
    onChanged();
  } else {
    documentsBuilder_.remove(index);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public com.google.cloud.dialogflow.v2.Document.Builder getDocumentsBuilder(int index) {
  // Forces creation of the field builder (see getDocumentsFieldBuilder).
  return getDocumentsFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public com.google.cloud.dialogflow.v2.DocumentOrBuilder getDocumentsOrBuilder(int index) {
  if (documentsBuilder_ == null) {
    return documents_.get(index);
  } else {
    return documentsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public java.util.List<? extends com.google.cloud.dialogflow.v2.DocumentOrBuilder>
    getDocumentsOrBuilderList() {
  if (documentsBuilder_ != null) {
    return documentsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(documents_);
  }
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public com.google.cloud.dialogflow.v2.Document.Builder addDocumentsBuilder() {
  return getDocumentsFieldBuilder()
      .addBuilder(com.google.cloud.dialogflow.v2.Document.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public com.google.cloud.dialogflow.v2.Document.Builder addDocumentsBuilder(int index) {
  return getDocumentsFieldBuilder()
      .addBuilder(index, com.google.cloud.dialogflow.v2.Document.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The list of documents.
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2.Document documents = 1;</code>
 */
public java.util.List<com.google.cloud.dialogflow.v2.Document.Builder>
    getDocumentsBuilderList() {
  return getDocumentsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3; from then on the inline list is
// released (set to null) and the builder owns the field's data.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.dialogflow.v2.Document,
        com.google.cloud.dialogflow.v2.Document.Builder,
        com.google.cloud.dialogflow.v2.DocumentOrBuilder>
    getDocumentsFieldBuilder() {
  if (documentsBuilder_ == null) {
    documentsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dialogflow.v2.Document,
            com.google.cloud.dialogflow.v2.Document.Builder,
            com.google.cloud.dialogflow.v2.DocumentOrBuilder>(
            documents_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    documents_ = null;
  }
  return documentsBuilder_;
}
// Lazy UTF-8 field: holds either a String or a ByteString; getters convert and
// cache in the requested representation.
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Decode the ByteString once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // Encode the String once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  // Reset to the message's default value ("" for proto3 strings).
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Token to retrieve the next page of results, or empty if there are no
 * more results in the list.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Final overrides pinning the covariant Builder return type for unknown-field APIs.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ListDocumentsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ListDocumentsResponse)
// Shared immutable singleton representing the all-defaults message.
private static final com.google.cloud.dialogflow.v2.ListDocumentsResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ListDocumentsResponse();
}

/** Returns the shared default (all fields unset) instance. */
public static com.google.cloud.dialogflow.v2.ListDocumentsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Singleton wire parser. parsePartialFrom delegates to Builder.mergeFrom and,
// on failure, attaches whatever was parsed so far as the "unfinished message".
private static final com.google.protobuf.Parser<ListDocumentsResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListDocumentsResponse>() {
      @java.lang.Override
      public ListDocumentsResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Normalize plain I/O failures to the protobuf exception type expected here.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the singleton parser for this message type. */
public static com.google.protobuf.Parser<ListDocumentsResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListDocumentsResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dialogflow.v2.ListDocumentsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/hop | 35,834 | plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googleanalytics/GoogleAnalyticsDialog.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.hop.pipeline.transforms.googleanalytics;
import com.google.analytics.data.v1beta.BetaAnalyticsDataClient;
import com.google.analytics.data.v1beta.BetaAnalyticsDataSettings;
import com.google.analytics.data.v1beta.DateRange;
import com.google.analytics.data.v1beta.Dimension;
import com.google.analytics.data.v1beta.DimensionHeader;
import com.google.analytics.data.v1beta.Metric;
import com.google.analytics.data.v1beta.MetricHeader;
import com.google.analytics.data.v1beta.MetricType;
import com.google.analytics.data.v1beta.RunReportRequest;
import com.google.analytics.data.v1beta.RunReportResponse;
import com.google.api.gax.core.FixedCredentialsProvider;
import com.google.auth.Credentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.hop.core.Const;
import org.apache.hop.core.row.IValueMeta;
import org.apache.hop.core.row.value.ValueMetaBase;
import org.apache.hop.core.util.Utils;
import org.apache.hop.core.variables.IVariables;
import org.apache.hop.i18n.BaseMessages;
import org.apache.hop.pipeline.Pipeline;
import org.apache.hop.pipeline.PipelineMeta;
import org.apache.hop.pipeline.PipelinePreviewFactory;
import org.apache.hop.ui.core.PropsUi;
import org.apache.hop.ui.core.dialog.BaseDialog;
import org.apache.hop.ui.core.dialog.EnterNumberDialog;
import org.apache.hop.ui.core.dialog.EnterTextDialog;
import org.apache.hop.ui.core.dialog.ErrorDialog;
import org.apache.hop.ui.core.dialog.MessageBox;
import org.apache.hop.ui.core.dialog.PreviewRowsDialog;
import org.apache.hop.ui.core.widget.ColumnInfo;
import org.apache.hop.ui.core.widget.TableView;
import org.apache.hop.ui.core.widget.TextVar;
import org.apache.hop.ui.pipeline.dialog.PipelinePreviewProgressDialog;
import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Link;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
public class GoogleAnalyticsDialog extends BaseTransformDialog {
// Bundle anchor for i18n lookups via BaseMessages.getString(PKG, ...).
private static final Class<?> PKG = GoogleAnalyticsMeta.class;

// Shared i18n key for the "Reference" links next to the query fields.
public static final String CONST_GOOGLE_ANALYTICS_DIALOG_QUERY_REFERENCE_LABEL =
    "GoogleAnalyticsDialog.Query.Reference.Label";

// The transform metadata being edited by this dialog.
private GoogleAnalyticsMeta input;

// --- Connection group widgets ---
private TextVar wOauthAccount;       // service-account e-mail
private Button fileChooser;          // browse button for the key file
private TextVar keyFilename;         // path to the service-account key file

// Output-field definition table.
private TableView wFields;

// --- Query group widgets ---
private TextVar wQuStartDate;
private TextVar wQuEndDate;
private TextVar wQuDimensions;
private TextVar wQuMetrics;
private TextVar wQuSort;

private Group gConnect;
private TextVar wGaAppName;          // application name reported to the GA API
private Text wLimit;                 // max rows to fetch
private TextVar wGaPropertyId;       // GA4 property id to query

// Layout helpers captured once in open().
private int middle;
private int margin;

// Marks the transform as changed whenever a widget is modified.
private ModifyListener lsMod;

// External documentation links shown in the dialog (GA Data API v1beta).
static final String REFERENCE_SORT_URI =
    "https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/OrderBy";
static final String REFERENCE_METRICS_URI =
    "https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#metrics";
static final String REFERENCE_DIMENSIONS_URI =
    "https://developers.google.com/analytics/devguides/reporting/data/v1/api-schema#dimensions";
static final String REFERENCE_DIMENSION_AND_METRIC_URI =
    "https://support.google.com/analytics/answer/9143382?hl=en";
static final String REFERENCE_METRICAGGS_URI =
    "https://developers.google.com/analytics/devguides/reporting/data/v1/rest/v1beta/MetricAggregation";
/**
 * Creates the settings dialog for the Google Analytics input transform.
 *
 * @param parent parent SWT shell
 * @param variables variable space used to resolve variable expressions in the text fields
 * @param transformMeta metadata of the transform being edited
 * @param pipelineMeta metadata of the enclosing pipeline
 */
public GoogleAnalyticsDialog(
    Shell parent,
    IVariables variables,
    GoogleAnalyticsMeta transformMeta,
    PipelineMeta pipelineMeta) {
  super(parent, variables, transformMeta, pipelineMeta);
  // Keep a typed reference to the meta for the dialog's own accessors.
  setInput(transformMeta);
}
@Override
public String open() {
Shell parent = getParent();
shell = new Shell(parent, SWT.DIALOG_TRIM | SWT.RESIZE | SWT.MIN | SWT.MAX);
PropsUi.setLook(shell);
setShellImage(shell, getInput());
lsMod = e -> getInput().setChanged();
backupChanged = getInput().hasChanged();
FormLayout formLayout = new FormLayout();
formLayout.marginWidth = Const.FORM_MARGIN;
formLayout.marginHeight = Const.FORM_MARGIN;
shell.setLayout(formLayout);
shell.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Shell.Title"));
middle = props.getMiddlePct();
margin = Const.MARGIN;
// Buttons at the very bottom
wOk = new Button(shell, SWT.PUSH);
wOk.setText(BaseMessages.getString(PKG, "System.Button.OK"));
wOk.addListener(SWT.Selection, e -> ok());
wCancel = new Button(shell, SWT.PUSH);
wCancel.setText(BaseMessages.getString(PKG, "System.Button.Cancel"));
wCancel.addListener(SWT.Selection, e -> cancel());
wGet = new Button(shell, SWT.PUSH);
wGet.setText(BaseMessages.getString(PKG, "System.Button.GetFields"));
wGet.addListener(SWT.Selection, e -> getFields());
wPreview = new Button(shell, SWT.PUSH);
wPreview.setText(BaseMessages.getString(PKG, "System.Button.Preview"));
wPreview.addListener(SWT.Selection, e -> preview());
BaseTransformDialog.positionBottomButtons(
shell, new Button[] {wOk, wGet, wPreview, wCancel}, margin, null);
/*************************************************
* // TRANSFORM NAME ENTRY
*************************************************/
// TransformName line
wlTransformName = new Label(shell, SWT.RIGHT);
wlTransformName.setText(BaseMessages.getString(PKG, "System.TransformName.Label"));
wlTransformName.setToolTipText(BaseMessages.getString(PKG, "System.TransformName.Tooltip"));
PropsUi.setLook(wlTransformName);
fdlTransformName = new FormData();
fdlTransformName.left = new FormAttachment(0, 0);
fdlTransformName.right = new FormAttachment(middle, -margin);
fdlTransformName.top = new FormAttachment(0, margin);
wlTransformName.setLayoutData(fdlTransformName);
wTransformName = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wTransformName.setText(transformName);
PropsUi.setLook(wTransformName);
wTransformName.addModifyListener(lsMod);
fdTransformName = new FormData();
fdTransformName.left = new FormAttachment(middle, 0);
fdTransformName.top = new FormAttachment(0, margin);
fdTransformName.right = new FormAttachment(100, 0);
wTransformName.setLayoutData(fdTransformName);
/*************************************************
* // GOOGLE ANALYTICS CONNECTION GROUP
*************************************************/
gConnect = new Group(shell, SWT.SHADOW_ETCHED_IN);
gConnect.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.ConnectGroup.Label"));
FormLayout gConnectLayout = new FormLayout();
gConnectLayout.marginWidth = 3;
gConnectLayout.marginHeight = 3;
gConnect.setLayout(gConnectLayout);
PropsUi.setLook(gConnect);
FormData fdConnect = new FormData();
fdConnect.left = new FormAttachment(0, 0);
fdConnect.right = new FormAttachment(100, 0);
fdConnect.top = new FormAttachment(wTransformName, margin);
gConnect.setLayoutData(fdConnect);
// Google Analytics app name
Label wlGaAppName = new Label(gConnect, SWT.RIGHT);
wlGaAppName.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.AppName.Label"));
PropsUi.setLook(wlGaAppName);
FormData fdlGaAppName = new FormData();
fdlGaAppName.top = new FormAttachment(0, margin);
fdlGaAppName.left = new FormAttachment(0, 0);
fdlGaAppName.right = new FormAttachment(middle, -margin);
wlGaAppName.setLayoutData(fdlGaAppName);
wGaAppName = new TextVar(variables, gConnect, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wGaAppName.addModifyListener(lsMod);
wGaAppName.setToolTipText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.AppName.Tooltip"));
PropsUi.setLook(wGaAppName);
FormData fdGaAppName = new FormData();
fdGaAppName.top = new FormAttachment(wTransformName, margin);
fdGaAppName.left = new FormAttachment(middle, 0);
fdGaAppName.right = new FormAttachment(100, 0);
wGaAppName.setLayoutData(fdGaAppName);
createOauthServiceCredentialsControls();
/*************************************************
* // GOOGLE ANALYTICS QUERY GROUP
*************************************************/
Group gQuery = new Group(shell, SWT.SHADOW_ETCHED_IN);
gQuery.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.QueryGroup.Label"));
FormLayout gQueryLayout = new FormLayout();
gQueryLayout.marginWidth = 3;
gQueryLayout.marginHeight = 3;
gQuery.setLayout(gQueryLayout);
PropsUi.setLook(gQuery);
// query start date
Label wlQuStartDate = new Label(gQuery, SWT.RIGHT);
wlQuStartDate.setText(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.StartDate.Label"));
PropsUi.setLook(wlQuStartDate);
FormData fdlQuStartDate = new FormData();
fdlQuStartDate.top = new FormAttachment(0, margin);
fdlQuStartDate.left = new FormAttachment(0, 0);
fdlQuStartDate.right = new FormAttachment(middle, -margin);
wlQuStartDate.setLayoutData(fdlQuStartDate);
wQuStartDate = new TextVar(variables, gQuery, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wQuStartDate.addModifyListener(lsMod);
wQuStartDate.setToolTipText(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.StartDate.Tooltip"));
PropsUi.setLook(wQuStartDate);
FormData fdQuStartDate = new FormData();
fdQuStartDate.top = new FormAttachment(0, margin);
fdQuStartDate.left = new FormAttachment(middle, 0);
fdQuStartDate.right = new FormAttachment(100, 0);
wQuStartDate.setLayoutData(fdQuStartDate);
// query end date
Label wlQuEndDate = new Label(gQuery, SWT.RIGHT);
wlQuEndDate.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.EndDate.Label"));
PropsUi.setLook(wlQuEndDate);
FormData fdlQuEndDate = new FormData();
fdlQuEndDate.top = new FormAttachment(wQuStartDate, margin);
fdlQuEndDate.left = new FormAttachment(0, 0);
fdlQuEndDate.right = new FormAttachment(middle, -margin);
wlQuEndDate.setLayoutData(fdlQuEndDate);
wQuEndDate = new TextVar(variables, gQuery, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wQuEndDate.addModifyListener(lsMod);
wQuEndDate.setToolTipText(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.EndDate.Tooltip"));
PropsUi.setLook(wQuEndDate);
FormData fdQuEndDate = new FormData();
fdQuEndDate.top = new FormAttachment(wQuStartDate, margin);
fdQuEndDate.left = new FormAttachment(middle, 0);
fdQuEndDate.right = new FormAttachment(100, 0);
wQuEndDate.setLayoutData(fdQuEndDate);
// query dimensions
Label wlQuDimensions = new Label(gQuery, SWT.RIGHT);
wlQuDimensions.setText(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.Dimensions.Label"));
PropsUi.setLook(wlQuDimensions);
FormData fdlQuDimensions = new FormData();
fdlQuDimensions.top = new FormAttachment(wQuEndDate, margin);
fdlQuDimensions.left = new FormAttachment(0, 0);
fdlQuDimensions.right = new FormAttachment(middle, -margin);
wlQuDimensions.setLayoutData(fdlQuDimensions);
wQuDimensions = new TextVar(variables, gQuery, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wQuDimensions.addModifyListener(lsMod);
wQuDimensions.setToolTipText(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.Dimensions.Tooltip"));
PropsUi.setLook(wQuDimensions);
Link wQuDimensionsReference = new Link(gQuery, SWT.SINGLE);
wQuDimensionsReference.setText(
BaseMessages.getString(PKG, CONST_GOOGLE_ANALYTICS_DIALOG_QUERY_REFERENCE_LABEL));
PropsUi.setLook(wQuDimensionsReference);
wQuDimensionsReference.addListener(
SWT.Selection, ev -> BareBonesBrowserLaunch.openURL(REFERENCE_DIMENSIONS_URI));
wQuDimensionsReference.pack(true);
FormData fdQuDimensions = new FormData();
fdQuDimensions.top = new FormAttachment(wQuEndDate, margin);
fdQuDimensions.left = new FormAttachment(middle, 0);
fdQuDimensions.right =
new FormAttachment(100, -wQuDimensionsReference.getBounds().width - margin);
wQuDimensions.setLayoutData(fdQuDimensions);
FormData fdQuDimensionsReference = new FormData();
fdQuDimensionsReference.top = new FormAttachment(wQuEndDate, margin);
fdQuDimensionsReference.left = new FormAttachment(wQuDimensions, 0);
fdQuDimensionsReference.right = new FormAttachment(100, 0);
wQuDimensionsReference.setLayoutData(fdQuDimensionsReference);
// query Metrics
Label wlQuMetrics = new Label(gQuery, SWT.RIGHT);
wlQuMetrics.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.Metrics.Label"));
PropsUi.setLook(wlQuMetrics);
FormData fdlQuMetrics = new FormData();
fdlQuMetrics.top = new FormAttachment(wQuDimensions, margin);
fdlQuMetrics.left = new FormAttachment(0, 0);
fdlQuMetrics.right = new FormAttachment(middle, -margin);
wlQuMetrics.setLayoutData(fdlQuMetrics);
wQuMetrics = new TextVar(variables, gQuery, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wQuMetrics.addModifyListener(lsMod);
wQuMetrics.setToolTipText(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.Metrics.Tooltip"));
PropsUi.setLook(wQuMetrics);
Link wQuMetricsReference = new Link(gQuery, SWT.SINGLE);
wQuMetricsReference.setText(
BaseMessages.getString(PKG, CONST_GOOGLE_ANALYTICS_DIALOG_QUERY_REFERENCE_LABEL));
PropsUi.setLook(wQuMetricsReference);
wQuMetricsReference.addListener(
SWT.Selection, ev -> BareBonesBrowserLaunch.openURL(REFERENCE_METRICS_URI));
wQuMetricsReference.pack(true);
FormData fdQuMetrics = new FormData();
fdQuMetrics.top = new FormAttachment(wQuDimensions, margin);
fdQuMetrics.left = new FormAttachment(middle, 0);
fdQuMetrics.right = new FormAttachment(100, -wQuMetricsReference.getBounds().width - margin);
wQuMetrics.setLayoutData(fdQuMetrics);
FormData fdQuMetricsReference = new FormData();
fdQuMetricsReference.top = new FormAttachment(wQuDimensions, margin);
fdQuMetricsReference.left = new FormAttachment(wQuMetrics, 0);
fdQuMetricsReference.right = new FormAttachment(100, 0);
wQuMetricsReference.setLayoutData(fdQuMetricsReference);
// query Sort
Label wlQuSort = new Label(gQuery, SWT.RIGHT);
wlQuSort.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.Sort.Label"));
PropsUi.setLook(wlQuSort);
FormData fdlQuSort = new FormData();
fdlQuSort.top = new FormAttachment(wQuMetrics, margin);
fdlQuSort.left = new FormAttachment(0, 0);
fdlQuSort.right = new FormAttachment(middle, -margin);
wlQuSort.setLayoutData(fdlQuSort);
wQuSort = new TextVar(variables, gQuery, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wQuSort.addModifyListener(lsMod);
wQuSort.setToolTipText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Query.Sort.Tooltip"));
PropsUi.setLook(wQuSort);
Link wQuSortReference = new Link(gQuery, SWT.SINGLE);
wQuSortReference.setText(
BaseMessages.getString(PKG, CONST_GOOGLE_ANALYTICS_DIALOG_QUERY_REFERENCE_LABEL));
PropsUi.setLook(wQuSortReference);
wQuSortReference.addListener(
SWT.Selection, ev -> BareBonesBrowserLaunch.openURL(REFERENCE_SORT_URI));
wQuSortReference.pack(true);
FormData fdQuSort = new FormData();
fdQuSort.top = new FormAttachment(wQuMetrics, margin);
fdQuSort.left = new FormAttachment(middle, 0);
fdQuSort.right = new FormAttachment(100, -wQuSortReference.getBounds().width - margin);
wQuSort.setLayoutData(fdQuSort);
FormData fdQuSortReference = new FormData();
fdQuSortReference.top = new FormAttachment(wQuMetrics, margin);
fdQuSortReference.left = new FormAttachment(wQuSort, 0);
fdQuSortReference.right = new FormAttachment(100, 0);
wQuSortReference.setLayoutData(fdQuSortReference);
FormData fdQueryGroup = new FormData();
fdQueryGroup.left = new FormAttachment(0, 0);
fdQueryGroup.right = new FormAttachment(100, 0);
fdQueryGroup.top = new FormAttachment(gConnect, margin);
gQuery.setLayoutData(fdQueryGroup);
gQuery.setTabList(new Control[] {wQuStartDate, wQuEndDate, wQuDimensions, wQuMetrics, wQuSort});
// Limit input ...
Label wlLimit = new Label(shell, SWT.RIGHT);
wlLimit.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.LimitSize.Label"));
PropsUi.setLook(wlLimit);
FormData fdlLimit = new FormData();
fdlLimit.left = new FormAttachment(0, 0);
fdlLimit.right = new FormAttachment(middle, -margin);
fdlLimit.bottom = new FormAttachment(wOk, -2 * margin);
wlLimit.setLayoutData(fdlLimit);
wLimit = new Text(shell, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
wLimit.setToolTipText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.LimitSize.Tooltip"));
PropsUi.setLook(wLimit);
wLimit.addModifyListener(lsMod);
FormData fdLimit = new FormData();
fdLimit.left = new FormAttachment(middle, 0);
fdLimit.right = new FormAttachment(100, 0);
fdLimit.top = new FormAttachment(wlLimit, 0, SWT.CENTER);
wLimit.setLayoutData(fdLimit);
/*************************************************
* // KEY / LOOKUP TABLE
*************************************************/
// lookup fields settings widgets
Link wlFields = new Link(shell, SWT.NONE);
wlFields.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.Return.Label"));
PropsUi.setLook(wlFields);
wlFields.addListener(
SWT.Selection, ev -> BareBonesBrowserLaunch.openURL(REFERENCE_DIMENSION_AND_METRIC_URI));
FormData fdlReturn = new FormData();
fdlReturn.left = new FormAttachment(0, 0);
fdlReturn.top = new FormAttachment(gQuery, margin);
wlFields.setLayoutData(fdlReturn);
int fieldWidgetCols = 5;
int fieldWidgetRows =
(getInput().getGoogleAnalyticsFields() != null
? getInput().getGoogleAnalyticsFields().size()
: 1);
ColumnInfo[] ciKeys = new ColumnInfo[fieldWidgetCols];
ciKeys[0] =
new ColumnInfo(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.ColumnInfo.FeedFieldType"),
ColumnInfo.COLUMN_TYPE_CCOMBO,
new String[] {
GoogleAnalyticsMeta.FIELD_TYPE_DIMENSION,
GoogleAnalyticsMeta.FIELD_TYPE_METRIC,
GoogleAnalyticsMeta.FIELD_TYPE_DATA_SOURCE_PROPERTY,
GoogleAnalyticsMeta.FIELD_TYPE_DATA_SOURCE_FIELD
},
true);
ciKeys[1] =
new ColumnInfo(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.ColumnInfo.FeedField"),
ColumnInfo.COLUMN_TYPE_TEXT,
false,
false);
ciKeys[1].setUsingVariables(true);
ciKeys[2] =
new ColumnInfo(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.ColumnInfo.RenameTo"),
ColumnInfo.COLUMN_TYPE_TEXT,
false,
false);
ciKeys[3] =
new ColumnInfo(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.ColumnInfo.Type"),
ColumnInfo.COLUMN_TYPE_CCOMBO,
ValueMetaBase.getTypes());
ciKeys[4] =
new ColumnInfo(
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.ColumnInfo.Format"),
ColumnInfo.COLUMN_TYPE_FORMAT,
4);
setTableView(
new TableView(
variables,
shell,
SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI | SWT.V_SCROLL | SWT.H_SCROLL,
ciKeys,
fieldWidgetRows,
lsMod,
props));
FormData fdReturn = new FormData();
fdReturn.left = new FormAttachment(0, 0);
fdReturn.top = new FormAttachment(wlFields, margin);
fdReturn.right = new FormAttachment(100, 0);
fdReturn.bottom = new FormAttachment(wLimit, -margin);
getTableView().setLayoutData(fdReturn);
fileChooser.addListener(SWT.Selection, this::browseKeyFile);
/*************************************************
* // POPULATE AND OPEN DIALOG
*************************************************/
getData();
getInput().setChanged(backupChanged);
wTransformName.setFocus();
shell.setTabList(new Control[] {wTransformName, gConnect, gQuery, getTableView()});
BaseDialog.defaultShellHandling(shell, c -> ok(), c -> cancel());
return transformName;
}
/**
 * Opens a file-selection dialog filtered to {@code .properties} files and writes the chosen
 * path into the service-account key file field ({@code keyFilename}).
 *
 * @param e the button selection event (unused; required by the listener signature)
 */
private void browseKeyFile(Event e) {
BaseDialog.presentFileDialog(
shell,
keyFilename,
variables,
new String[] {"*.properties", "*.*"},
new String[] {"Properties files (*.properties)", "All Files (*.*)"},
true);
}
/**
 * Builds a Google Analytics Data API client from the service-account key file selected in the
 * dialog and runs a report for the property, dimensions, metrics and date range currently
 * entered in the UI widgets.
 *
 * @return the API response, or {@code null} when the client could not be created (an error
 *     dialog has already been shown in that case)
 */
private RunReportResponse getReportResponse() {
  BetaAnalyticsDataClient analyticsData;
  // try-with-resources: the key file stream was previously opened and never closed.
  try (InputStream inputStream = new FileInputStream(keyFilename.getText())) {
    Credentials credentials = ServiceAccountCredentials.fromStream(inputStream);
    BetaAnalyticsDataSettings settings =
        BetaAnalyticsDataSettings.newHttpJsonBuilder()
            .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
            .build();
    analyticsData = BetaAnalyticsDataClient.create(settings);
  } catch (IOException e) {
    new ErrorDialog(
        shell,
        "Error creating connection",
        "Error reading key file or creating Google Analytics connection",
        e);
    // Without a client there is nothing to query. Previously execution fell through
    // here and failed with a NullPointerException on analyticsData.runReport().
    return null;
  }
  // The dimension and metric fields hold comma-separated names.
  List<Dimension> dimensionList = new ArrayList<>();
  String dimensionString = wQuDimensions.getText();
  for (String dimension : dimensionString.split(",")) {
    dimensionList.add(Dimension.newBuilder().setName(dimension).build());
  }
  List<Metric> metricList = new ArrayList<>();
  String metricsString = wQuMetrics.getText();
  for (String metric : metricsString.split(",")) {
    metricList.add(Metric.newBuilder().setName(metric).build());
  }
  RunReportRequest request =
      RunReportRequest.newBuilder()
          .setProperty("properties/" + wGaPropertyId.getText())
          .addAllDimensions(dimensionList)
          .addAllMetrics(metricList)
          .addDateRanges(
              DateRange.newBuilder()
                  .setStartDate(wQuStartDate.getText())
                  .setEndDate(wQuEndDate.getText()))
          .build();
  return analyticsData.runReport(request);
}
// Visible for testing
// Visible for testing
/**
 * Runs the query defined in the dialog and replaces the fields table with one row per returned
 * dimension/metric header, mapping GA metric types to Hop value types and number formats.
 */
void getFields() {
  getTableView().removeAll();
  RunReportResponse response = getReportResponse();
  // Guard BEFORE dereferencing: previously the header lists were fetched first,
  // so a null response (e.g. bad key file) caused a NullPointerException and the
  // "response == null" branch below could never fire.
  if (response == null
      || response.getDimensionHeadersList().isEmpty()
      || response.getMetricHeadersList().isEmpty()) {
    MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_ERROR);
    mb.setText("Query yields empty feed");
    mb.setMessage("The feed did not give any results. Please specify a query that returns data.");
    mb.open();
    return;
  }
  List<DimensionHeader> dimensionHeaders = response.getDimensionHeadersList();
  List<MetricHeader> metricHeaders = response.getMetricHeadersList();
  int i = 0;
  getTableView().table.setItemCount(dimensionHeaders.size() + metricHeaders.size());
  // Dimensions always come back as strings.
  for (DimensionHeader colHeader : dimensionHeaders) {
    String name = colHeader.getName();
    TableItem item = getTableView().table.getItem(i);
    item.setText(1, GoogleAnalyticsMeta.FIELD_TYPE_DIMENSION);
    item.setText(2, name);
    item.setText(3, name);
    item.setText(4, ValueMetaBase.getTypeDesc(IValueMeta.TYPE_STRING));
    item.setText(5, "");
    i++;
  }
  // Metrics: map the GA metric type to an integer, floating point or string column.
  for (MetricHeader metricHeader : metricHeaders) {
    TableItem item = getTableView().table.getItem(i);
    String name = metricHeader.getName();
    item.setText(1, GoogleAnalyticsMeta.FIELD_TYPE_METRIC);
    item.setText(2, name);
    item.setText(3, name);
    MetricType metricType = metricHeader.getType();
    if (metricType.equals(MetricType.TYPE_INTEGER)) {
      item.setText(4, ValueMetaBase.getTypeDesc(IValueMeta.TYPE_INTEGER));
      item.setText(5, "#;-#");
    } else if (metricType.equals(MetricType.TYPE_FLOAT)
        || metricType.equals(MetricType.TYPE_SECONDS)
        || metricType.equals(MetricType.TYPE_MILLISECONDS)
        || metricType.equals(MetricType.TYPE_MINUTES)
        || metricType.equals(MetricType.TYPE_HOURS)
        || metricType.equals(MetricType.TYPE_STANDARD)
        || metricType.equals(MetricType.TYPE_CURRENCY)
        || metricType.equals(MetricType.TYPE_FEET)
        || metricType.equals(MetricType.TYPE_MILES)
        || metricType.equals(MetricType.TYPE_METERS)
        || metricType.equals(MetricType.TYPE_KILOMETERS)) {
      item.setText(4, ValueMetaBase.getTypeDesc(IValueMeta.TYPE_NUMBER));
      item.setText(5, "#.#;-#.#");
    } else {
      item.setText(4, ValueMetaBase.getTypeDesc(IValueMeta.TYPE_STRING));
      item.setText(5, "");
    }
    i++;
  }
  getTableView().removeEmptyRows();
  getTableView().setRowNums();
  getTableView().optWidth(true);
  getInput().setChanged();
}
/**
 * Copies the current dialog widget values into the given transform metadata, including the
 * connection settings, the query definition and the output field table.
 *
 * @param meta the metadata object to populate
 */
private void getInfo(GoogleAnalyticsMeta meta) {
  transformName = wTransformName.getText(); // return value
  // Connection settings.
  meta.setGaAppName(wGaAppName.getText());
  meta.setOAuthServiceAccount(wOauthAccount.getText());
  meta.setOAuthKeyFile(keyFilename.getText());
  meta.setGaProperty(wGaPropertyId.getText());
  // Query definition.
  meta.setStartDate(wQuStartDate.getText());
  meta.setEndDate(wQuEndDate.getText());
  meta.setDimensions(wQuDimensions.getText());
  meta.setMetrics(wQuMetrics.getText());
  meta.setSort(wQuSort.getText());
  // Output fields: one GoogleAnalyticsField per non-empty table row.
  List<GoogleAnalyticsField> outputFields = new ArrayList<>();
  int rowCount = getTableView().nrNonEmpty();
  for (int row = 0; row < rowCount; row++) {
    TableItem tableRow = getTableView().getNonEmpty(row);
    GoogleAnalyticsField outputField = new GoogleAnalyticsField();
    outputField.setFeedFieldType(tableRow.getText(1));
    outputField.setFeedField(tableRow.getText(2));
    outputField.setOutputFieldName(tableRow.getText(3));
    outputField.setType(tableRow.getText(4));
    outputField.setInputFormat(tableRow.getText(5));
    outputFields.add(outputField);
  }
  meta.setGoogleAnalyticsFields(outputFields);
  meta.setRowLimit(Const.toInt(wLimit.getText(), 0));
}
// Preview the data
// Preview the data
/**
 * Builds a one-transform preview pipeline from the current dialog values, asks the user for a
 * preview size, runs it with a progress dialog and shows the resulting rows (or the error log
 * if the preview pipeline produced errors).
 */
private void preview() {
// Create the XML input transform
GoogleAnalyticsMeta oneMeta = new GoogleAnalyticsMeta();
getInfo(oneMeta);
// Ask how many rows to preview; 0 or cancel skips the preview entirely.
EnterNumberDialog numberDialog =
new EnterNumberDialog(
shell,
props.getDefaultPreviewSize(),
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.PreviewSize.DialogTitle"),
BaseMessages.getString(PKG, "GoogleAnalyticsDialog.PreviewSize.DialogMessage"));
int previewSize = numberDialog.open();
if (previewSize > 0) {
oneMeta.setRowLimit(previewSize);
PipelineMeta previewMeta =
PipelinePreviewFactory.generatePreviewPipeline(
pipelineMeta.getMetadataProvider(), oneMeta, wTransformName.getText());
PipelinePreviewProgressDialog progressDialog =
new PipelinePreviewProgressDialog(
shell,
variables,
previewMeta,
new String[] {wTransformName.getText()},
new int[] {previewSize});
progressDialog.open();
Pipeline pipeline = progressDialog.getPipeline();
String loggingText = progressDialog.getLoggingText();
// If the preview run reported errors, show the captured log text first.
if (!progressDialog.isCancelled()
&& pipeline.getResult() != null
&& pipeline.getResult().getNrErrors() > 0) {
EnterTextDialog etd =
new EnterTextDialog(
shell,
BaseMessages.getString(PKG, "System.Dialog.PreviewError.Title"),
BaseMessages.getString(PKG, "System.Dialog.PreviewError.Message"),
loggingText,
true);
etd.setReadOnly();
etd.open();
}
// Show whatever rows were collected (possibly none).
PreviewRowsDialog prd =
new PreviewRowsDialog(
shell,
variables,
SWT.NONE,
wTransformName.getText(),
progressDialog.getPreviewRowsMeta(wTransformName.getText()),
progressDialog.getPreviewRows(wTransformName.getText()),
loggingText);
prd.open();
}
}
/** Collect data from the meta and place it in the dialog */
public void getData() {
if (getInput().getGaAppName() != null) {
wGaAppName.setText(getInput().getGaAppName());
}
wOauthAccount.setText(Const.NVL(getInput().getOAuthServiceAccount(), ""));
keyFilename.setText(Const.NVL(getInput().getOAuthKeyFile(), ""));
wGaPropertyId.setText(Const.NVL(getInput().getGaProperty(), ""));
if (getInput().getStartDate() != null) {
wQuStartDate.setText(getInput().getStartDate());
}
if (getInput().getEndDate() != null) {
wQuEndDate.setText(getInput().getEndDate());
}
if (getInput().getDimensions() != null) {
wQuDimensions.setText(getInput().getDimensions());
}
if (getInput().getMetrics() != null) {
wQuMetrics.setText(getInput().getMetrics());
}
if (getInput().getSort() != null) {
wQuSort.setText(getInput().getSort());
}
wFields.removeAll();
wFields.removeEmptyRows();
if (!input.getGoogleAnalyticsFields().isEmpty()) {
wFields.table.setItemCount(input.getGoogleAnalyticsFields().size());
List<GoogleAnalyticsField> googleAnalyticsFields = input.getGoogleAnalyticsFields();
int i = 0;
for (GoogleAnalyticsField field : googleAnalyticsFields) {
TableItem item = wFields.table.getItem(i);
if (!Utils.isEmpty(field.getType())) {
item.setText(1, field.getFeedFieldType());
}
if (!Utils.isEmpty(field.getFeedField())) {
item.setText(2, field.getFeedField());
}
if (!Utils.isEmpty(field.getOutputFieldName())) {
item.setText(3, field.getOutputFieldName());
}
if (!Utils.isEmpty(field.getType())) {
item.setText(4, field.getType());
}
if (!Utils.isEmpty(field.getInputFormat())) {
item.setText(5, field.getType());
}
i++;
}
}
wFields.removeEmptyRows();
getTableView().setRowNums();
getTableView().optWidth(true);
wLimit.setText(getInput().getRowLimit() + "");
setActive();
wTransformName.selectAll();
wTransformName.setFocus();
}
/** Discards the user's edits: restores the meta's changed flag and closes without a result. */
private void cancel() {
transformName = null;
getInput().setChanged(backupChanged);
dispose();
}
// let the meta know about the entered data
// let the meta know about the entered data
/** Commits the dialog's current values into the transform meta and closes the dialog. */
private void ok() {
getInfo(getInput());
dispose();
}
/**
 * Builds the OAuth service-account controls inside the connection group: the service account
 * field, the key file field with its Browse button, and the GA property id field.
 */
private void createOauthServiceCredentialsControls() {
  // Service account line.
  Label oauthAccountLabel = new Label(gConnect, SWT.RIGHT);
  oauthAccountLabel.setText(
      BaseMessages.getString(PKG, "GoogleAnalyticsDialog.OauthAccount.Label"));
  PropsUi.setLook(oauthAccountLabel);
  FormData fdOauthAccountLabel = new FormData();
  fdOauthAccountLabel.left = new FormAttachment(0, 0);
  fdOauthAccountLabel.top = new FormAttachment(wGaAppName, margin);
  fdOauthAccountLabel.right = new FormAttachment(middle, -margin);
  oauthAccountLabel.setLayoutData(fdOauthAccountLabel);

  wOauthAccount = new TextVar(variables, gConnect, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  wOauthAccount.setToolTipText(
      BaseMessages.getString(PKG, "GoogleAnalyticsDialog.OauthAccount.Tooltip"));
  PropsUi.setLook(wOauthAccount);
  wOauthAccount.addModifyListener(lsMod);
  FormData fdOauthAccountField = new FormData();
  fdOauthAccountField.left = new FormAttachment(middle, 0);
  fdOauthAccountField.top = new FormAttachment(wGaAppName, margin);
  fdOauthAccountField.right = new FormAttachment(100, -margin);
  wOauthAccount.setLayoutData(fdOauthAccountField);

  // Key file line: Browse button first so the text field can attach to it.
  fileChooser = new Button(gConnect, SWT.PUSH | SWT.CENTER);
  fileChooser.setText(BaseMessages.getString(PKG, "System.Button.Browse"));
  PropsUi.setLook(fileChooser);
  FormData fdBrowseButton = new FormData();
  fdBrowseButton.right = new FormAttachment(100, 0);
  fdBrowseButton.top = new FormAttachment(wOauthAccount, margin);
  fileChooser.setLayoutData(fdBrowseButton);

  Label keyFileLabel = new Label(gConnect, SWT.RIGHT);
  keyFileLabel.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.KeyFile.Label"));
  PropsUi.setLook(keyFileLabel);
  FormData fdKeyFileLabel = new FormData();
  fdKeyFileLabel.top = new FormAttachment(wOauthAccount, margin);
  fdKeyFileLabel.left = new FormAttachment(0, 0);
  fdKeyFileLabel.right = new FormAttachment(middle, -margin);
  keyFileLabel.setLayoutData(fdKeyFileLabel);

  keyFilename = new TextVar(variables, gConnect, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  keyFilename.setToolTipText(
      BaseMessages.getString(PKG, "GoogleAnalyticsDialog.KeyFilename.Tooltip"));
  keyFilename.addModifyListener(lsMod);
  PropsUi.setLook(keyFilename);
  FormData fdKeyFileField = new FormData();
  fdKeyFileField.top = new FormAttachment(wOauthAccount, margin);
  fdKeyFileField.left = new FormAttachment(middle, 0);
  fdKeyFileField.right = new FormAttachment(fileChooser, -margin);
  keyFilename.setLayoutData(fdKeyFileField);

  // GA property id line.
  Label propertyIdLabel = new Label(gConnect, SWT.RIGHT);
  propertyIdLabel.setText(BaseMessages.getString(PKG, "GoogleAnalyticsDialog.PropertyId.Label"));
  PropsUi.setLook(propertyIdLabel);
  FormData fdPropertyIdLabel = new FormData();
  fdPropertyIdLabel.top = new FormAttachment(keyFilename, margin);
  fdPropertyIdLabel.left = new FormAttachment(0, 0);
  fdPropertyIdLabel.right = new FormAttachment(middle, -margin);
  propertyIdLabel.setLayoutData(fdPropertyIdLabel);

  wGaPropertyId = new TextVar(variables, gConnect, SWT.SINGLE | SWT.LEFT | SWT.BORDER);
  wGaPropertyId.setToolTipText(
      BaseMessages.getString(PKG, "GoogleAnalyticsDialog.PropertyId.Tooltip"));
  wGaPropertyId.addModifyListener(lsMod);
  PropsUi.setLook(wGaPropertyId);
  FormData fdPropertyIdField = new FormData();
  fdPropertyIdField.top = new FormAttachment(keyFilename, margin);
  fdPropertyIdField.left = new FormAttachment(middle, 0);
  fdPropertyIdField.right = new FormAttachment(100, -margin);
  wGaPropertyId.setLayoutData(fdPropertyIdField);
}
/** @return the output fields table widget (package-private for tests) */
TableView getTableView() {
return wFields;
}

/** Replaces the output fields table widget; used when (re)building the dialog and in tests. */
void setTableView(TableView wFields) {
this.wFields = wFields;
}

/** @return the transform metadata backing this dialog (package-private for tests) */
GoogleAnalyticsMeta getInput() {
return input;
}

/** Replaces the transform metadata backing this dialog; used in tests. */
void setInput(GoogleAnalyticsMeta input) {
this.input = input;
}
}
|
apache/iotdb | 34,925 | integration-test/src/test/java/org/apache/iotdb/relational/it/db/it/IoTDBPatternRecognitionIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.relational.it.db.it;
import org.apache.iotdb.it.env.EnvFactory;
import org.apache.iotdb.it.framework.IoTDBTestRunner;
import org.apache.iotdb.itbase.category.TableClusterIT;
import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.sql.Connection;
import java.sql.Statement;
import static java.lang.String.format;
import static org.apache.iotdb.db.it.utils.TestUtils.tableResultSetEqualTest;
import static org.junit.Assert.fail;
@RunWith(IoTDBTestRunner.class)
@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
public class IoTDBPatternRecognitionIT {
// Database every test statement runs against.
private static final String DATABASE_NAME = "test";

// Fixture statements executed once in setUp(): one schema + data set per test scenario.
private static final String[] sqls =
new String[] {
"CREATE DATABASE " + DATABASE_NAME,
"USE " + DATABASE_NAME,
// TABLE: beidou
"CREATE TABLE beidou(device_id STRING TAG, department STRING FIELD, altitude DOUBLE FIELD)",
// d1 and DEP1
"INSERT INTO beidou VALUES (2025-01-01T00:00:00, 'd1', 'DEP1', 480.5)",
"INSERT INTO beidou VALUES (2025-01-01T00:01:00, 'd1', 'DEP1', 510.2)",
"INSERT INTO beidou VALUES (2025-01-01T00:02:00, 'd1', 'DEP1', 508.7)",
"INSERT INTO beidou VALUES (2025-01-01T00:04:00, 'd1', 'DEP1', 495.0)",
"INSERT INTO beidou VALUES (2025-01-01T00:05:00, 'd1', 'DEP1', 523.0)",
"INSERT INTO beidou VALUES (2025-01-01T00:06:00, 'd1', 'DEP1', 517.4)",
// d2 and DEP1
"INSERT INTO beidou VALUES (2025-01-01T00:07:00, 'd2', 'DEP1', 530.1)",
"INSERT INTO beidou VALUES (2025-01-01T00:08:00, 'd2', 'DEP1', 540.4)",
"INSERT INTO beidou VALUES (2025-01-01T00:09:00, 'd2', 'DEP1', 498.2)",
// DEP2
"INSERT INTO beidou VALUES (2025-01-01T00:10:00, 'd3', 'DEP2', 470.0)",
"INSERT INTO beidou VALUES (2025-01-01T00:11:00, 'd3', 'DEP2', 505.0)",
"INSERT INTO beidou VALUES (2025-01-01T00:12:00, 'd3', 'DEP2', 480.0)",
// altitude lower than 500
"INSERT INTO beidou VALUES (2025-01-01T00:13:00, 'd4', 'DEP_1', 450)",
"INSERT INTO beidou VALUES (2025-01-01T00:14:00, 'd4', 'DEP_1', 470)",
"INSERT INTO beidou VALUES (2025-01-01T00:15:00, 'd4', 'DEP_1', 490)",
// outside the time range
"INSERT INTO beidou VALUES (2024-01-01T00:30:00, 'd1', 'DEP_1', 600)",
"INSERT INTO beidou VALUES (2025-01-01T02:00:00, 'd1', 'DEP_1', 570)",
// TABLE: t1
"CREATE TABLE t1(totalprice DOUBLE FIELD)",
"INSERT INTO t1 VALUES (2025-01-01T00:01:00, 90)",
"INSERT INTO t1 VALUES (2025-01-01T00:02:00, 80)",
"INSERT INTO t1 VALUES (2025-01-01T00:03:00, 70)",
"INSERT INTO t1 VALUES (2025-01-01T00:04:00, 70)",
// TABLE: t2
"CREATE TABLE t2(totalprice DOUBLE FIELD)",
"INSERT INTO t2 VALUES (2025-01-01T00:01:00, 10)",
"INSERT INTO t2 VALUES (2025-01-01T00:02:00, 20)",
"INSERT INTO t2 VALUES (2025-01-01T00:03:00, 30)",
// TABLE: t3
"CREATE TABLE t3(totalprice DOUBLE FIELD)",
"INSERT INTO t3 VALUES (2025-01-01T00:01:00, 10)",
"INSERT INTO t3 VALUES (2025-01-01T00:02:00, 20)",
"INSERT INTO t3 VALUES (2025-01-01T00:03:00, 30)",
"INSERT INTO t3 VALUES (2025-01-01T00:04:00, 30)",
"INSERT INTO t3 VALUES (2025-01-01T00:05:00, 40)",
// TABLE: t4
"CREATE TABLE t4(totalprice DOUBLE FIELD)",
"INSERT INTO t4 VALUES (2025-01-01T00:01:00, 90)",
"INSERT INTO t4 VALUES (2025-01-01T00:02:00, 80)",
"INSERT INTO t4 VALUES (2025-01-01T00:03:00, 70)",
"INSERT INTO t4 VALUES (2025-01-01T00:04:00, 80)",
// TABLE: t5
"CREATE TABLE t5(part STRING TAG, num INT32 FIELD, totalprice DOUBLE FIELD)",
"INSERT INTO t5 VALUES (2025-01-01T00:01:00, 'p1', 1, 10.0)",
};
/**
 * Executes every fixture statement in {@link #sqls} against a fresh table connection.
 * Any failure aborts the test class with the underlying error message.
 */
protected static void insertData() {
  try (Connection connection = EnvFactory.getEnv().getTableConnection();
      Statement statement = connection.createStatement()) {
    for (String sql : sqls) {
      statement.execute(sql);
    }
  } catch (Exception e) {
    // Include the cause in the failure message; a bare "insertData failed."
    // hid what actually went wrong.
    fail("insertData failed: " + e.getMessage());
  }
}
/** Starts the cluster once for the whole class and loads the fixture data. */
@BeforeClass
public static void setUp() {
EnvFactory.getEnv().initClusterEnvironment();
insertData();
}
/** Tears the cluster down after all tests in this class have run. */
@AfterClass
public static void tearDown() {
EnvFactory.getEnv().cleanClusterEnvironment();
}
/**
 * Basic event recognition over the whole beidou table: per device, every maximal run of rows
 * with altitude above 500 becomes one match, reporting its start/end time and last altitude.
 */
@Test
public void testEventRecognition() {
String[] expectedHeader =
new String[] {"device_id", "match", "event_start", "event_end", "last_altitude"};
String[] retArray =
new String[] {
"d1,1,2024-01-01T00:30:00.000Z,2024-01-01T00:30:00.000Z,600.0,",
"d1,2,2025-01-01T00:01:00.000Z,2025-01-01T00:02:00.000Z,508.7,",
"d1,3,2025-01-01T00:05:00.000Z,2025-01-01T02:00:00.000Z,570.0,",
"d2,1,2025-01-01T00:07:00.000Z,2025-01-01T00:08:00.000Z,540.4,",
"d3,1,2025-01-01T00:11:00.000Z,2025-01-01T00:11:00.000Z,505.0,",
};
tableResultSetEqualTest(
"SELECT * "
+ "FROM beidou "
+ "MATCH_RECOGNIZE ( "
+ "  PARTITION BY device_id "
+ "  ORDER BY time "
+ "  MEASURES "
+ "    MATCH_NUMBER() AS match, "
+ "    RPR_FIRST(A.time) AS event_start, "
+ "    RPR_LAST(A.time) AS event_end, "
+ "    RPR_LAST(A.altitude) AS last_altitude "
+ "  ONE ROW PER MATCH "
+ "  PATTERN (A+) "
+ "  DEFINE "
+ "    A AS A.altitude > 500 "
+ ") AS m "
+ "ORDER BY device_id, match ",
expectedHeader,
retArray,
DATABASE_NAME);
}
/**
* Search range: all devices whose department is 'DEP_1', each device's data is grouped
* separately, and the time range is between 2025-01-01T00:00:00 and 2025-01-01T01:00:00.
*
* <p>Event analysis: Whenever the altitude exceeds 500 and then drops below 500, it is marked as
* an event.
*/
/**
 * Same pattern as {@link #testEventRecognition()} but the MATCH_RECOGNIZE input is a subquery
 * filtering on department 'DEP1' and a 2025-01-01 one-hour window, so rows outside that range
 * (and other departments) cannot contribute to matches.
 */
@Test
public void testEventRecognitionWithSubquery() {
String[] expectedHeader =
new String[] {"device_id", "match", "event_start", "event_end", "last_altitude"};
String[] retArray =
new String[] {
"d1,1,2025-01-01T00:01:00.000Z,2025-01-01T00:02:00.000Z,508.7,",
"d1,2,2025-01-01T00:05:00.000Z,2025-01-01T00:06:00.000Z,517.4,",
"d2,1,2025-01-01T00:07:00.000Z,2025-01-01T00:08:00.000Z,540.4,",
};
tableResultSetEqualTest(
"SELECT * "
+ "FROM ( "
+ "  SELECT time, device_id, altitude "
+ "  FROM beidou "
+ "  WHERE department = 'DEP1' AND time >= 2025-01-01T00:00:00 AND time < 2025-01-01T01:00:00 "
+ ")"
+ "MATCH_RECOGNIZE ( "
+ "  PARTITION BY device_id "
+ "  ORDER BY time "
+ "  MEASURES "
+ "    MATCH_NUMBER() AS match, "
+ "    RPR_FIRST(A.time) AS event_start, "
+ "    RPR_LAST(A.time) AS event_end, "
+ "    RPR_LAST(A.altitude) AS last_altitude "
+ "  ONE ROW PER MATCH "
+ "  PATTERN (A+) "
+ "  DEFINE "
+ "    A AS A.altitude > 500 "
+ ") AS m "
+ "ORDER BY device_id, match ",
expectedHeader,
retArray,
DATABASE_NAME);
}
/**
 * Verifies the output column layout of MATCH_RECOGNIZE: ONE ROW PER MATCH yields partition key
 * + measures only, while ALL ROWS PER MATCH also includes the ordering key and the remaining
 * input columns.
 */
@Test
public void testOutputLayout() {
// ONE ROW PER MATCH: PK, Measures
String[] expectedHeader1 = new String[] {"part", "match", "label"};
String[] retArray1 =
new String[] {
"p1,1,A,",
};
// ALL ROWS PER MATCH: PK, OK, Measures, Others
String[] expectedHeader2 = new String[] {"part", "num", "match", "label", "time", "totalprice"};
String[] retArray2 =
new String[] {
"p1,1,1,A,2025-01-01T00:01:00.000Z,10.0,",
};
// %s is replaced by the rows-per-match clause under test.
String sql =
"SELECT * "
+ "FROM t5 "
+ "MATCH_RECOGNIZE ( "
+ "  PARTITION BY part "
+ "  ORDER BY num "
+ "  MEASURES "
+ "    MATCH_NUMBER() AS match, "
+ "    CLASSIFIER() AS label "
+ "  %s "
+ "  PATTERN (A+) "
+ "  DEFINE "
+ "    A AS true "
+ ") AS m ";
tableResultSetEqualTest(
format(sql, "ONE ROW PER MATCH"), expectedHeader1, retArray1, DATABASE_NAME);
tableResultSetEqualTest(
format(sql, "ALL ROWS PER MATCH"), expectedHeader2, retArray2, DATABASE_NAME);
}
/**
 * Exercises the row-output modes (ONE ROW PER MATCH, ALL ROWS PER MATCH, OMIT EMPTY MATCHES,
 * WITH UNMATCHED ROWS) against patterns B* (allows empty matches) and B+ (requires at least one
 * decreasing-price row).
 */
@Test
public void testOutputMode() {
String[] expectedHeader = new String[] {"match", "price", "label"};
// B* with ONE ROW PER MATCH: empty matches show null measures.
String[] retArray1 =
new String[] {
"1,null,null,", "2,70.0,B,", "3,null,null,",
};
// B* with ALL ROWS PER MATCH: empty matches still emit one row each.
String[] retArray2 =
new String[] {
"1,null,null,", "2,80.0,B,", "2,70.0,B,", "3,null,null,",
};
// B+ with ALL ROWS PER MATCH: only the non-empty match remains.
String[] retArray3 =
new String[] {
"1,80.0,B,", "1,70.0,B,",
};
// B* with OMIT EMPTY MATCHES: empty matches dropped, numbering preserved.
String[] retArray4 =
new String[] {
"2,80.0,B,", "2,70.0,B,",
};
// B+ WITH UNMATCHED ROWS: unmatched input rows surface with null measures.
String[] retArray5 =
new String[] {
"null,null,null,", "1,80.0,B,", "1,70.0,B,", "null,null,null,",
};
// First %s: rows-per-match clause; second %s: pattern clause.
String sql =
"SELECT m.match, m.price, m.label "
+ "FROM t1 "
+ "MATCH_RECOGNIZE ( "
+ "  ORDER BY time "
+ "  MEASURES "
+ "    MATCH_NUMBER() AS match, "
+ "    RUNNING RPR_LAST(totalprice) AS price, "
+ "    CLASSIFIER() AS label "
+ "  %s "
+ "  AFTER MATCH SKIP PAST LAST ROW "
+ "  %s "
+ "  DEFINE "
+ "    B AS B.totalprice < PREV(B.totalprice) "
+ ") AS m";
tableResultSetEqualTest(
format(sql, "ONE ROW PER MATCH", "PATTERN (B*)"), expectedHeader, retArray1, DATABASE_NAME);
tableResultSetEqualTest(
format(sql, "ALL ROWS PER MATCH", "PATTERN (B*)"),
expectedHeader,
retArray2,
DATABASE_NAME);
tableResultSetEqualTest(
format(sql, "ALL ROWS PER MATCH", "PATTERN (B+)"),
expectedHeader,
retArray3,
DATABASE_NAME);
tableResultSetEqualTest(
format(sql, "ALL ROWS PER MATCH OMIT EMPTY MATCHES", "PATTERN (B*)"),
expectedHeader,
retArray4,
DATABASE_NAME);
tableResultSetEqualTest(
format(sql, "ALL ROWS PER MATCH WITH UNMATCHED ROWS", "PATTERN (B+)"),
expectedHeader,
retArray5,
DATABASE_NAME);
}
  // Exercises the logical navigation functions RPR_FIRST/RPR_LAST (the SQL
  // row-pattern FIRST()/LAST(), presumably renamed to avoid a clash with
  // aggregate functions -- see the comments referencing FIRST/LAST below)
  // together with the RUNNING and FINAL semantics modifiers.
  @Test
  public void testLogicalNavigationFunction() {
    String[] expectedHeader = new String[] {"time", "price"};
    String[] retArray1 =
        new String[] {
          "2025-01-01T00:01:00.000Z,10.0,",
          "2025-01-01T00:02:00.000Z,20.0,",
          "2025-01-01T00:03:00.000Z,30.0,",
        };
    String[] retArray2 =
        new String[] {
          "2025-01-01T00:01:00.000Z,30.0,",
          "2025-01-01T00:02:00.000Z,30.0,",
          "2025-01-01T00:03:00.000Z,30.0,",
        };
    String[] retArray3 =
        new String[] {
          "2025-01-01T00:01:00.000Z,10.0,",
          "2025-01-01T00:02:00.000Z,10.0,",
          "2025-01-01T00:03:00.000Z,10.0,",
        };
    String[] retArray4 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,10.0,",
        };
    String[] retArray5 =
        new String[] {
          "2025-01-01T00:01:00.000Z,10.0,",
          "2025-01-01T00:02:00.000Z,10.0,",
          "2025-01-01T00:03:00.000Z,10.0,",
        };
    String[] retArray6 =
        new String[] {
          "2025-01-01T00:01:00.000Z,30.0,",
          "2025-01-01T00:02:00.000Z,30.0,",
          "2025-01-01T00:03:00.000Z,30.0,",
        };
    String sql =
        "SELECT m.time, m.price "
            + "FROM t2 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    %s AS price "
            + "  ALL ROWS PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  PATTERN (A+) "
            + "  DEFINE "
            + "    A AS true "
            + ") AS m";
    // LAST(totalprice) -- RUNNING is the default, so all four forms agree.
    tableResultSetEqualTest(format(sql, "totalprice"), expectedHeader, retArray1, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "RPR_LAST(totalprice)"), expectedHeader, retArray1, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "RPR_LAST(totalprice, 0)"), expectedHeader, retArray1, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "RUNNING RPR_LAST(totalprice)"), expectedHeader, retArray1, DATABASE_NAME);
    // FINAL LAST(totalprice) -- evaluated over the complete match for every row.
    tableResultSetEqualTest(
        format(sql, "FINAL RPR_LAST(totalprice)"), expectedHeader, retArray2, DATABASE_NAME);
    // FIRST(totalprice) -- RUNNING and FINAL coincide for FIRST.
    tableResultSetEqualTest(
        format(sql, "RPR_FIRST(totalprice)"), expectedHeader, retArray3, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "RPR_FIRST(totalprice, 0)"), expectedHeader, retArray3, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "RUNNING RPR_FIRST(totalprice)"), expectedHeader, retArray3, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "FINAL RPR_FIRST(totalprice)"), expectedHeader, retArray3, DATABASE_NAME);
    // LAST(totalprice, 2) -- navigates two rows back from the running last row.
    tableResultSetEqualTest(
        format(sql, "RPR_LAST(totalprice, 2)"), expectedHeader, retArray4, DATABASE_NAME);
    // FINAL LAST(totalprice, 2)
    tableResultSetEqualTest(
        format(sql, "FINAL RPR_LAST(totalprice, 2)"), expectedHeader, retArray5, DATABASE_NAME);
    // FIRST(totalprice, 2) -- navigates two rows forward from the first row.
    tableResultSetEqualTest(
        format(sql, "RPR_FIRST(totalprice, 2)"), expectedHeader, retArray6, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "FINAL RPR_FIRST(totalprice, 2)"), expectedHeader, retArray6, DATABASE_NAME);
  }
  // Exercises the physical navigation functions PREV/NEXT, which step over
  // physical rows (in ORDER BY order) rather than over pattern-matched rows,
  // both in MEASURES (sql1) and inside DEFINE conditions (sql2), including
  // navigation past the partition boundary (which yields null).
  @Test
  public void testPhysicalNavigationFunction() {
    String[] expectedHeader1 = new String[] {"time", "price"};
    String[] retArray1 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,10.0,",
          "2025-01-01T00:03:00.000Z,20.0,",
        };
    String[] retArray2 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,10.0,",
        };
    String[] retArray3 =
        new String[] {
          "2025-01-01T00:01:00.000Z,20.0,",
          "2025-01-01T00:02:00.000Z,30.0,",
          "2025-01-01T00:03:00.000Z,null,",
        };
    String[] retArray4 =
        new String[] {
          "2025-01-01T00:01:00.000Z,30.0,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,null,",
        };
    String[] retArray5 =
        new String[] {
          "30.0,",
        };
    String[] retArray6 =
        new String[] {
          "20.0,",
        };
    String[] retArray7 =
        new String[] {
          "40.0,",
        };
    String[] retArray8 =
        new String[] {
          "null,",
        };
    String sql1 =
        "SELECT m.time, m.price "
            + "FROM t2 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    %s AS price "
            + "  ALL ROWS PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  PATTERN (A+) "
            + "  DEFINE "
            + "    A AS true "
            + ") AS m";
    // PREV(totalprice) -- implicit offset is 1.
    tableResultSetEqualTest(
        format(sql1, "PREV(totalprice)"), expectedHeader1, retArray1, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql1, "PREV(totalprice, 1)"), expectedHeader1, retArray1, DATABASE_NAME);
    // PREV(totalprice, 2)
    tableResultSetEqualTest(
        format(sql1, "PREV(totalprice, 2)"), expectedHeader1, retArray2, DATABASE_NAME);
    // NEXT(totalprice) -- implicit offset is 1.
    tableResultSetEqualTest(
        format(sql1, "NEXT(totalprice)"), expectedHeader1, retArray3, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql1, "NEXT(totalprice, 1)"), expectedHeader1, retArray3, DATABASE_NAME);
    // NEXT(totalprice, 2)
    tableResultSetEqualTest(
        format(sql1, "NEXT(totalprice, 2)"), expectedHeader1, retArray4, DATABASE_NAME);
    String[] expectedHeader2 = new String[] {"price"};
    String sql2 =
        "SELECT m.price "
            + "FROM t3 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    %s AS price "
            + "  ONE ROW PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  PATTERN (A) "
            + "  DEFINE "
            + "    A AS A.totalprice = PREV(A.totalprice) "
            + ") AS m";
    // PREV(A.totalprice) -- navigation anchored at the row matched to A.
    tableResultSetEqualTest(
        format(sql2, "PREV(A.totalprice)"), expectedHeader2, retArray5, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql2, "PREV(A.totalprice, 1)"), expectedHeader2, retArray5, DATABASE_NAME);
    // PREV(A.totalprice, 2)
    tableResultSetEqualTest(
        format(sql2, "PREV(A.totalprice, 2)"), expectedHeader2, retArray6, DATABASE_NAME);
    // NEXT(A.totalprice)
    tableResultSetEqualTest(
        format(sql2, "NEXT(A.totalprice)"), expectedHeader2, retArray7, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql2, "NEXT(A.totalprice, 1)"), expectedHeader2, retArray7, DATABASE_NAME);
    // out of partition -- navigating beyond either end of the partition yields null.
    tableResultSetEqualTest(
        format(sql2, "PREV(A.totalprice, 4)"), expectedHeader2, retArray8, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql2, "NEXT(A.totalprice, 2)"), expectedHeader2, retArray8, DATABASE_NAME);
  }
  // Exercises nesting a physical navigation function (PREV/NEXT) around a
  // logical one (RPR_FIRST/RPR_LAST): the logical function picks an anchor
  // row within the match, then the physical function steps from that anchor
  // over physical rows; stepping outside the available rows yields null.
  @Test
  public void testNestedNavigation() {
    String[] expectedHeader = new String[] {"time", "price"};
    String[] retArray1 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,null,",
        };
    String[] retArray2 =
        new String[] {
          "2025-01-01T00:01:00.000Z,20.0,",
          "2025-01-01T00:02:00.000Z,20.0,",
          "2025-01-01T00:03:00.000Z,20.0,",
        };
    String[] retArray3 =
        new String[] {
          "2025-01-01T00:01:00.000Z,10.0,",
          "2025-01-01T00:02:00.000Z,10.0,",
          "2025-01-01T00:03:00.000Z,10.0,",
        };
    String[] retArray4 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,10.0,",
          "2025-01-01T00:03:00.000Z,20.0,",
        };
    String[] retArray5 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,10.0,",
        };
    String[] retArray6 =
        new String[] {
          "2025-01-01T00:01:00.000Z,30.0,",
          "2025-01-01T00:02:00.000Z,30.0,",
          "2025-01-01T00:03:00.000Z,30.0,",
        };
    String[] retArray7 =
        new String[] {
          "2025-01-01T00:01:00.000Z,20.0,",
          "2025-01-01T00:02:00.000Z,30.0,",
          "2025-01-01T00:03:00.000Z,null,",
        };
    String[] retArray8 =
        new String[] {
          "2025-01-01T00:01:00.000Z,30.0,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,null,",
        };
    String[] retArray9 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,20.0,",
        };
    String[] retArray10 =
        new String[] {
          "2025-01-01T00:01:00.000Z,null,",
          "2025-01-01T00:02:00.000Z,null,",
          "2025-01-01T00:03:00.000Z,30.0,",
        };
    String sql =
        "SELECT m.time, m.price "
            + "FROM t2 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    %s AS price "
            + "  ALL ROWS PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  PATTERN (A+) "
            + "  DEFINE "
            + "    A AS true "
            + ") AS m";
    // PREV(FIRST(totalprice)) -- anchor at the first matched row, step backwards.
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_FIRST(totalprice))"), expectedHeader, retArray1, DATABASE_NAME);
    // PREV(FIRST(totalprice), 2)
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_FIRST(totalprice), 2)"), expectedHeader, retArray1, DATABASE_NAME);
    // PREV(FIRST(totalprice, 2))
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_FIRST(totalprice, 2))"), expectedHeader, retArray2, DATABASE_NAME);
    // PREV(FIRST(totalprice, 2), 2)
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_FIRST(totalprice, 2), 2)"), expectedHeader, retArray3, DATABASE_NAME);
    // PREV(LAST(totalprice)) -- anchor at the running last row, step backwards.
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_LAST(totalprice))"), expectedHeader, retArray4, DATABASE_NAME);
    // PREV(LAST(totalprice), 2)
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_LAST(totalprice), 2)"), expectedHeader, retArray5, DATABASE_NAME);
    // PREV(LAST(totalprice, 1))
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_LAST(totalprice, 1))"), expectedHeader, retArray5, DATABASE_NAME);
    // PREV(LAST(totalprice, 1), 2)
    tableResultSetEqualTest(
        format(sql, "PREV(RPR_LAST(totalprice, 1), 2)"), expectedHeader, retArray1, DATABASE_NAME);
    // NEXT(FIRST(totalprice)) -- anchor at the first matched row, step forwards.
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_FIRST(totalprice))"), expectedHeader, retArray2, DATABASE_NAME);
    // NEXT(FIRST(totalprice), 2)
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_FIRST(totalprice), 2)"), expectedHeader, retArray6, DATABASE_NAME);
    // NEXT(FIRST(totalprice, 1))
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_FIRST(totalprice, 1))"), expectedHeader, retArray6, DATABASE_NAME);
    // NEXT(FIRST(totalprice, 1), 2)
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_FIRST(totalprice, 1), 2)"), expectedHeader, retArray1, DATABASE_NAME);
    // NEXT(LAST(totalprice)) -- anchor at the running last row, step forwards.
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_LAST(totalprice))"), expectedHeader, retArray7, DATABASE_NAME);
    // NEXT(LAST(totalprice), 2)
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_LAST(totalprice), 2)"), expectedHeader, retArray8, DATABASE_NAME);
    // NEXT(LAST(totalprice, 2))
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_LAST(totalprice, 2))"), expectedHeader, retArray9, DATABASE_NAME);
    // NEXT(LAST(totalprice, 2), 2)
    tableResultSetEqualTest(
        format(sql, "NEXT(RPR_LAST(totalprice, 2), 2)"), expectedHeader, retArray10, DATABASE_NAME);
  }
  // Exercises SUBSET (union) variables: U and W are unions of the primary
  // pattern variables, and CLASSIFIER(U)/CLASSIFIER(W) report the label of
  // the latest row matched to any member of the corresponding subset.
  @Test
  public void testUnionVariable() {
    String[] expectedHeader = new String[] {"time", "match", "price", "lower_or_higher", "label"};
    // For each match: row 1 is matched to L or H, row 2 to A; U = (L, H) keeps
    // reporting the first row's label, while W = (A, L, H) tracks every row.
    String[] retArray1 =
        new String[] {
          "2025-01-01T00:01:00.000Z,1,90.0,H,H,",
          "2025-01-01T00:02:00.000Z,1,80.0,H,A,",
          "2025-01-01T00:03:00.000Z,2,70.0,L,L,",
          "2025-01-01T00:04:00.000Z,2,80.0,L,A,",
        };
    String sql =
        "SELECT m.time, m.match, m.price, m.lower_or_higher, m.label "
            + "FROM t4 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    MATCH_NUMBER() AS match, "
            + "    RUNNING RPR_LAST(totalprice) AS price, "
            + "    CLASSIFIER(U) AS lower_or_higher, "
            + "    CLASSIFIER(W) AS label "
            + "  ALL ROWS PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  PATTERN ((L | H) A) "
            + "  SUBSET "
            + "    U = (L, H), "
            + "    W = (A, L, H) "
            + "  DEFINE "
            + "    A AS A.totalprice = 80, "
            + "    L AS L.totalprice < 80, "
            + "    H AS H.totalprice > 80 "
            + ") AS m";
    tableResultSetEqualTest(sql, expectedHeader, retArray1, DATABASE_NAME);
  }
  // Exercises CLASSIFIER() nested inside physical navigation (PREV/NEXT):
  // the navigated label comes from the neighboring row's classification, and
  // navigating outside the current match yields null.
  @Test
  public void testClassifierFunction() {
    String[] expectedHeader =
        new String[] {"time", "match", "price", "label", "prev_label", "next_label"};
    // The scope of the CLASSIFIER() is within match
    String[] retArray1 =
        new String[] {
          "2025-01-01T00:01:00.000Z,1,90.0,H,null,A,",
          "2025-01-01T00:02:00.000Z,1,80.0,A,H,null,",
          "2025-01-01T00:03:00.000Z,2,70.0,L,null,A,",
          "2025-01-01T00:04:00.000Z,2,80.0,A,L,null,",
        };
    String sql =
        "SELECT m.time, m.match, m.price, m.label, m.prev_label, m.next_label "
            + "FROM t4 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    MATCH_NUMBER() AS match, "
            + "    RUNNING RPR_LAST(totalprice) AS price, "
            + "    CLASSIFIER() AS label, "
            + "    PREV(CLASSIFIER()) AS prev_label, "
            + "    NEXT(CLASSIFIER()) AS next_label "
            + "  ALL ROWS PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  PATTERN ((L | H) A) "
            + "  DEFINE "
            + "    A AS A.totalprice = 80, "
            + "    L AS L.totalprice < 80, "
            + "    H AS H.totalprice > 80 "
            + ") AS m";
    tableResultSetEqualTest(sql, expectedHeader, retArray1, DATABASE_NAME);
  }
  // Exercises the structural row-pattern operators: the partition anchors
  // ^ (start) and $ (end), concatenation, alternation (|), PERMUTE, and
  // grouping with parentheses.  The PATTERN/DEFINE pair is injected via %s.
  @Test
  public void testRowPattern() {
    String[] expectedHeader = new String[] {"time", "match", "price", "label"};
    // ^A can only match the first row of the partition.
    String[] retArray1 =
        new String[] {
          "2025-01-01T00:01:00.000Z,1,90.0,A,",
        };
    // A$ can only match the last row of the partition.
    String[] retArray2 =
        new String[] {
          "2025-01-01T00:04:00.000Z,1,70.0,A,",
        };
    // Concatenation A B C: one three-row match.
    String[] retArray3 =
        new String[] {
          "2025-01-01T00:02:00.000Z,1,80.0,A,",
          "2025-01-01T00:03:00.000Z,1,70.0,B,",
          "2025-01-01T00:04:00.000Z,1,70.0,C,",
        };
    // Alternation B | C | A: every row matches, preferring earlier alternatives.
    String[] retArray4 =
        new String[] {
          "2025-01-01T00:01:00.000Z,1,90.0,A,",
          "2025-01-01T00:02:00.000Z,2,80.0,B,",
          "2025-01-01T00:03:00.000Z,3,70.0,B,",
          "2025-01-01T00:04:00.000Z,4,70.0,C,",
        };
    // PERMUTE(B, C): B C or C B, preferring the permutation listed first.
    String[] retArray5 =
        new String[] {
          "2025-01-01T00:02:00.000Z,1,80.0,B,", "2025-01-01T00:03:00.000Z,1,70.0,C,",
        };
    String sql =
        "SELECT m.time, m.match, m.price, m.label "
            + "FROM t1 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    MATCH_NUMBER() AS match, "
            + "    RUNNING RPR_LAST(totalprice) AS price, "
            + "    CLASSIFIER() AS label "
            + "  ALL ROWS PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  %s " // PATTERN and DEFINE
            + ") AS m";
    // anchor pattern: partition start
    tableResultSetEqualTest(
        format(sql, "PATTERN (^A) " + "DEFINE A AS true "),
        expectedHeader,
        retArray1,
        DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (A^) " + "DEFINE A AS true "),
        expectedHeader,
        new String[] {},
        DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (^A^) " + "DEFINE A AS true "),
        expectedHeader,
        new String[] {},
        DATABASE_NAME);
    // anchor pattern: partition end
    tableResultSetEqualTest(
        format(sql, "PATTERN (A$) " + "DEFINE A AS true "),
        expectedHeader,
        retArray2,
        DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN ($A) " + "DEFINE A AS true "),
        expectedHeader,
        new String[] {},
        DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN ($A$) " + "DEFINE A AS true "),
        expectedHeader,
        new String[] {},
        DATABASE_NAME);
    // pattern concatenation
    tableResultSetEqualTest(
        format(
            sql,
            "PATTERN (A B C) "
                + "DEFINE "
                + "  B AS B.totalprice < PREV (B.totalprice), "
                + "  C AS C.totalprice = PREV (C.totalprice)"),
        expectedHeader,
        retArray3,
        DATABASE_NAME);
    // pattern alternation
    tableResultSetEqualTest(
        format(
            sql,
            "PATTERN (B | C | A) "
                + "DEFINE "
                + "  B AS B.totalprice < PREV (B.totalprice), "
                + "  C AS C.totalprice <= PREV (C.totalprice)"),
        expectedHeader,
        retArray4,
        DATABASE_NAME);
    // pattern permutation
    tableResultSetEqualTest(
        format(
            sql,
            "PATTERN (PERMUTE(B, C)) "
                + "DEFINE "
                + "  B AS B.totalprice < PREV (B.totalprice), "
                + "  C AS C.totalprice < PREV (C.totalprice)"),
        expectedHeader,
        retArray5,
        DATABASE_NAME);
    // grouped pattern
    tableResultSetEqualTest(
        format(
            sql,
            "PATTERN (((A) (B (C)))) "
                + "DEFINE "
                + "  B AS B.totalprice < PREV (B.totalprice), "
                + "  C AS C.totalprice = PREV (C.totalprice)"),
        expectedHeader,
        retArray3,
        DATABASE_NAME);
  }
  // Exercises pattern quantifiers: greedy (*, +, ?, {n,m}) vs. reluctant
  // (*?, +?, ??, {n,m}?) forms, open-ended and exact bounds, and an
  // unsatisfiable lower bound ({5,}) that produces no matches.
  @Test
  public void testPatternQuantifier() {
    String[] expectedHeader = new String[] {"time", "match", "price", "label"};
    // Greedy B*: one empty match, then one maximal three-row match.
    String[] retArray1 =
        new String[] {
          "2025-01-01T00:01:00.000Z,1,null,null,",
          "2025-01-01T00:02:00.000Z,2,80.0,B,",
          "2025-01-01T00:03:00.000Z,2,70.0,B,",
          "2025-01-01T00:04:00.000Z,2,70.0,B,",
        };
    // Reluctant B*?: prefers the empty match at every row.
    String[] retArray2 =
        new String[] {
          "2025-01-01T00:01:00.000Z,1,null,null,",
          "2025-01-01T00:02:00.000Z,2,null,null,",
          "2025-01-01T00:03:00.000Z,3,null,null,",
          "2025-01-01T00:04:00.000Z,4,null,null,",
        };
    // Greedy B+: one maximal three-row match.
    String[] retArray3 =
        new String[] {
          "2025-01-01T00:02:00.000Z,1,80.0,B,",
          "2025-01-01T00:03:00.000Z,1,70.0,B,",
          "2025-01-01T00:04:00.000Z,1,70.0,B,",
        };
    // Reluctant B+?: three minimal one-row matches.
    String[] retArray4 =
        new String[] {
          "2025-01-01T00:02:00.000Z,1,80.0,B,",
          "2025-01-01T00:03:00.000Z,2,70.0,B,",
          "2025-01-01T00:04:00.000Z,3,70.0,B,",
        };
    // Greedy B?: empty match on row 1, one-row matches afterwards.
    String[] retArray5 =
        new String[] {
          "2025-01-01T00:01:00.000Z,1,null,null,",
          "2025-01-01T00:02:00.000Z,2,80.0,B,",
          "2025-01-01T00:03:00.000Z,3,70.0,B,",
          "2025-01-01T00:04:00.000Z,4,70.0,B,",
        };
    // Reluctant B{2,}?: stops at the minimal two-row match.
    String[] retArray6 =
        new String[] {
          "2025-01-01T00:02:00.000Z,1,80.0,B,", "2025-01-01T00:03:00.000Z,1,70.0,B,",
        };
    String sql =
        "SELECT m.time, m.match, m.price, m.label "
            + "FROM t1 "
            + "MATCH_RECOGNIZE ( "
            + "  ORDER BY time "
            + "  MEASURES "
            + "    MATCH_NUMBER() AS match, "
            + "    RUNNING RPR_LAST(totalprice) AS price, "
            + "    CLASSIFIER() AS label "
            + "  ALL ROWS PER MATCH "
            + "  AFTER MATCH SKIP PAST LAST ROW "
            + "  %s " // PATTERN
            + "  DEFINE "
            + "    B AS B.totalprice <= PREV(B.totalprice) "
            + ") AS m";
    tableResultSetEqualTest(format(sql, "PATTERN (B*)"), expectedHeader, retArray1, DATABASE_NAME);
    tableResultSetEqualTest(format(sql, "PATTERN (B*?)"), expectedHeader, retArray2, DATABASE_NAME);
    tableResultSetEqualTest(format(sql, "PATTERN (B+)"), expectedHeader, retArray3, DATABASE_NAME);
    tableResultSetEqualTest(format(sql, "PATTERN (B+?)"), expectedHeader, retArray4, DATABASE_NAME);
    tableResultSetEqualTest(format(sql, "PATTERN (B?)"), expectedHeader, retArray5, DATABASE_NAME);
    tableResultSetEqualTest(format(sql, "PATTERN (B??)"), expectedHeader, retArray2, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{,})"), expectedHeader, retArray1, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{,}?)"), expectedHeader, retArray2, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{1,})"), expectedHeader, retArray3, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{1,}?)"), expectedHeader, retArray4, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{2,})"), expectedHeader, retArray3, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{2,}?)"), expectedHeader, retArray6, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{5,})"), expectedHeader, new String[] {}, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{5,}?)"), expectedHeader, new String[] {}, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{,1})"), expectedHeader, retArray5, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{,1}?)"), expectedHeader, retArray2, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{1,1})"), expectedHeader, retArray4, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{1,1}?)"), expectedHeader, retArray4, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{1})"), expectedHeader, retArray4, DATABASE_NAME);
    tableResultSetEqualTest(
        format(sql, "PATTERN (B{1}?)"), expectedHeader, retArray4, DATABASE_NAME);
  }
}
|
openjdk/jdk8 | 35,991 | jdk/src/share/classes/sun/misc/FpUtils.java | /*
* Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.misc;
import sun.misc.FloatConsts;
import sun.misc.DoubleConsts;
/**
* The class {@code FpUtils} contains static utility methods for
* manipulating and inspecting {@code float} and
* {@code double} floating-point numbers. These methods include
* functionality recommended or required by the IEEE 754
* floating-point standard.
*
* @author Joseph D. Darcy
*/
public class FpUtils {
/*
* The methods in this class are reasonably implemented using
* direct or indirect bit-level manipulation of floating-point
* values. However, having access to the IEEE 754 recommended
* functions would obviate the need for most programmers to engage
* in floating-point bit-twiddling.
*
* An IEEE 754 number has three fields, from most significant bit
* to to least significant, sign, exponent, and significand.
*
* msb lsb
* [sign|exponent| fractional_significand]
*
* Using some encoding cleverness, explained below, the high order
* bit of the logical significand does not need to be explicitly
* stored, thus "fractional_significand" instead of simply
* "significand" in the figure above.
*
* For finite normal numbers, the numerical value encoded is
*
* (-1)^sign * 2^(exponent)*(1.fractional_significand)
*
* Most finite floating-point numbers are normalized; the exponent
* value is reduced until the leading significand bit is 1.
* Therefore, the leading 1 is redundant and is not explicitly
* stored. If a numerical value is so small it cannot be
* normalized, it has a subnormal representation. Subnormal
* numbers don't have a leading 1 in their significand; subnormals
* are encoding using a special exponent value. In other words,
* the high-order bit of the logical significand can be elided in
* from the representation in either case since the bit's value is
* implicit from the exponent value.
*
* The exponent field uses a biased representation; if the bits of
* the exponent are interpreted as a unsigned integer E, the
* exponent represented is E - E_bias where E_bias depends on the
* floating-point format. E can range between E_min and E_max,
* constants which depend on the floating-point format. E_min and
* E_max are -126 and +127 for float, -1022 and +1023 for double.
*
* The 32-bit float format has 1 sign bit, 8 exponent bits, and 23
* bits for the significand (which is logically 24 bits wide
* because of the implicit bit). The 64-bit double format has 1
* sign bit, 11 exponent bits, and 52 bits for the significand
* (logically 53 bits).
*
* Subnormal numbers and zero have the special exponent value
* E_min -1; the numerical value represented by a subnormal is:
*
* (-1)^sign * 2^(E_min)*(0.fractional_significand)
*
* Zero is represented by all zero bits in the exponent and all
* zero bits in the significand; zero can have either sign.
*
* Infinity and NaN are encoded using the exponent value E_max +
* 1. Signed infinities have all significand bits zero; NaNs have
* at least one non-zero significand bit.
*
* The details of IEEE 754 floating-point encoding will be used in
* the methods below without further comment. For further
* exposition on IEEE 754 numbers, see "IEEE Standard for Binary
* Floating-Point Arithmetic" ANSI/IEEE Std 754-1985 or William
* Kahan's "Lecture Notes on the Status of IEEE Standard 754 for
* Binary Floating-Point Arithmetic",
* http://www.cs.berkeley.edu/~wkahan/ieee754status/ieee754.ps.
*
* Many of this class's methods are members of the set of IEEE 754
* recommended functions or similar functions recommended or
* required by IEEE 754R. Discussion of various implementation
* techniques for these functions have occurred in:
*
* W.J. Cody and Jerome T. Coonen, "Algorithm 772 Functions to
* Support the IEEE Standard for Binary Floating-Point
* Arithmetic," ACM Transactions on Mathematical Software,
* vol. 19, no. 4, December 1993, pp. 443-451.
*
* Joseph D. Darcy, "Writing robust IEEE recommended functions in
* ``100% Pure Java''(TM)," University of California, Berkeley
* technical report UCB//CSD-98-1009.
*/
    /**
     * Don't let anyone instantiate this class: FpUtils is a static
     * utility holder only.
     */
    private FpUtils() {}
// Helper Methods
// The following helper methods are used in the implementation of
// the public recommended functions; they generally omit certain
// tests for exception cases.
    /**
     * Returns unbiased exponent of a {@code double}.
     *
     * @param d the {@code double} whose exponent is to be extracted
     * @return the unbiased exponent of the argument
     * @deprecated Use Math.getExponent.
     */
    @Deprecated
    public static int getExponent(double d){
        return Math.getExponent(d);
    }
    /**
     * Returns unbiased exponent of a {@code float}.
     *
     * @param f the {@code float} whose exponent is to be extracted
     * @return the unbiased exponent of the argument
     * @deprecated Use Math.getExponent.
     */
    @Deprecated
    public static int getExponent(float f){
        return Math.getExponent(f);
    }
    /**
     * Returns the first floating-point argument with the sign of the
     * second floating-point argument.  Note that unlike the {@link
     * FpUtils#copySign(double, double) copySign} method, this method
     * does not require NaN {@code sign} arguments to be treated
     * as positive values; implementations are permitted to treat some
     * NaN arguments as positive and other NaN arguments as negative
     * to allow greater performance.
     *
     * @param magnitude the parameter providing the magnitude of the result
     * @param sign the parameter providing the sign of the result
     * @return a value with the magnitude of {@code magnitude}
     * and the sign of {@code sign}.
     * @author Joseph D. Darcy
     * @deprecated Use Math.copySign.
     */
    @Deprecated
    public static double rawCopySign(double magnitude, double sign) {
        // Math.copySign has exactly these relaxed ("raw") NaN-sign semantics.
        return Math.copySign(magnitude, sign);
    }
    /**
     * Returns the first floating-point argument with the sign of the
     * second floating-point argument.  Note that unlike the {@link
     * FpUtils#copySign(float, float) copySign} method, this method
     * does not require NaN {@code sign} arguments to be treated
     * as positive values; implementations are permitted to treat some
     * NaN arguments as positive and other NaN arguments as negative
     * to allow greater performance.
     *
     * @param magnitude the parameter providing the magnitude of the result
     * @param sign the parameter providing the sign of the result
     * @return a value with the magnitude of {@code magnitude}
     * and the sign of {@code sign}.
     * @author Joseph D. Darcy
     * @deprecated Use Math.copySign.
     */
    @Deprecated
    public static float rawCopySign(float magnitude, float sign) {
        // Math.copySign has exactly these relaxed ("raw") NaN-sign semantics.
        return Math.copySign(magnitude, sign);
    }
/* ***************************************************************** */
/**
* Returns {@code true} if the argument is a finite
* floating-point value; returns {@code false} otherwise (for
* NaN and infinity arguments).
*
* @param d the {@code double} value to be tested
* @return {@code true} if the argument is a finite
* floating-point value, {@code false} otherwise.
* @deprecated Use Double.isFinite.
*/
@Deprecated
public static boolean isFinite(double d) {
return Double.isFinite(d);
}
/**
* Returns {@code true} if the argument is a finite
* floating-point value; returns {@code false} otherwise (for
* NaN and infinity arguments).
*
* @param f the {@code float} value to be tested
* @return {@code true} if the argument is a finite
* floating-point value, {@code false} otherwise.
* @deprecated Use Float.isFinite.
*/
@Deprecated
public static boolean isFinite(float f) {
return Float.isFinite(f);
}
/**
* Returns {@code true} if the specified number is infinitely
* large in magnitude, {@code false} otherwise.
*
* <p>Note that this method is equivalent to the {@link
* Double#isInfinite(double) Double.isInfinite} method; the
* functionality is included in this class for convenience.
*
* @param d the value to be tested.
* @return {@code true} if the value of the argument is positive
* infinity or negative infinity; {@code false} otherwise.
*/
public static boolean isInfinite(double d) {
return Double.isInfinite(d);
}
/**
* Returns {@code true} if the specified number is infinitely
* large in magnitude, {@code false} otherwise.
*
* <p>Note that this method is equivalent to the {@link
* Float#isInfinite(float) Float.isInfinite} method; the
* functionality is included in this class for convenience.
*
* @param f the value to be tested.
* @return {@code true} if the argument is positive infinity or
* negative infinity; {@code false} otherwise.
*/
public static boolean isInfinite(float f) {
return Float.isInfinite(f);
}
/**
* Returns {@code true} if the specified number is a
* Not-a-Number (NaN) value, {@code false} otherwise.
*
* <p>Note that this method is equivalent to the {@link
* Double#isNaN(double) Double.isNaN} method; the functionality is
* included in this class for convenience.
*
* @param d the value to be tested.
* @return {@code true} if the value of the argument is NaN;
* {@code false} otherwise.
*/
public static boolean isNaN(double d) {
return Double.isNaN(d);
}
/**
* Returns {@code true} if the specified number is a
* Not-a-Number (NaN) value, {@code false} otherwise.
*
* <p>Note that this method is equivalent to the {@link
* Float#isNaN(float) Float.isNaN} method; the functionality is
* included in this class for convenience.
*
* @param f the value to be tested.
* @return {@code true} if the argument is NaN;
* {@code false} otherwise.
*/
public static boolean isNaN(float f) {
return Float.isNaN(f);
}
/**
* Returns {@code true} if the unordered relation holds
* between the two arguments. When two floating-point values are
* unordered, one value is neither less than, equal to, nor
* greater than the other. For the unordered relation to be true,
* at least one argument must be a {@code NaN}.
*
* @param arg1 the first argument
* @param arg2 the second argument
* @return {@code true} if at least one argument is a NaN,
* {@code false} otherwise.
*/
public static boolean isUnordered(double arg1, double arg2) {
return isNaN(arg1) || isNaN(arg2);
}
/**
* Returns {@code true} if the unordered relation holds
* between the two arguments. When two floating-point values are
* unordered, one value is neither less than, equal to, nor
* greater than the other. For the unordered relation to be true,
* at least one argument must be a {@code NaN}.
*
* @param arg1 the first argument
* @param arg2 the second argument
* @return {@code true} if at least one argument is a NaN,
* {@code false} otherwise.
*/
public static boolean isUnordered(float arg1, float arg2) {
return isNaN(arg1) || isNaN(arg2);
}
/**
* Returns unbiased exponent of a {@code double}; for
* subnormal values, the number is treated as if it were
* normalized. That is for all finite, non-zero, positive numbers
* <i>x</i>, <code>scalb(<i>x</i>, -ilogb(<i>x</i>))</code> is
* always in the range [1, 2).
* <p>
* Special cases:
* <ul>
* <li> If the argument is NaN, then the result is 2<sup>30</sup>.
* <li> If the argument is infinite, then the result is 2<sup>28</sup>.
* <li> If the argument is zero, then the result is -(2<sup>28</sup>).
* </ul>
*
* @param d floating-point number whose exponent is to be extracted
* @return unbiased exponent of the argument.
* @author Joseph D. Darcy
*/
public static int ilogb(double d) {
int exponent = getExponent(d);
switch (exponent) {
case DoubleConsts.MAX_EXPONENT+1: // NaN or infinity
if( isNaN(d) )
return (1<<30); // 2^30
else // infinite value
return (1<<28); // 2^28
case DoubleConsts.MIN_EXPONENT-1: // zero or subnormal
if(d == 0.0) {
return -(1<<28); // -(2^28)
}
else {
long transducer = Double.doubleToRawLongBits(d);
/*
* To avoid causing slow arithmetic on subnormals,
* the scaling to determine when d's significand
* is normalized is done in integer arithmetic.
* (there must be at least one "1" bit in the
* significand since zero has been screened out.
*/
// isolate significand bits
transducer &= DoubleConsts.SIGNIF_BIT_MASK;
assert(transducer != 0L);
// This loop is simple and functional. We might be
// able to do something more clever that was faster;
// e.g. number of leading zero detection on
// (transducer << (# exponent and sign bits).
while (transducer <
(1L << (DoubleConsts.SIGNIFICAND_WIDTH - 1))) {
transducer *= 2;
exponent--;
}
exponent++;
assert( exponent >=
DoubleConsts.MIN_EXPONENT - (DoubleConsts.SIGNIFICAND_WIDTH-1) &&
exponent < DoubleConsts.MIN_EXPONENT);
return exponent;
}
default:
assert( exponent >= DoubleConsts.MIN_EXPONENT &&
exponent <= DoubleConsts.MAX_EXPONENT);
return exponent;
}
}
/**
* Returns unbiased exponent of a {@code float}; for
* subnormal values, the number is treated as if it were
* normalized. That is for all finite, non-zero, positive numbers
* <i>x</i>, <code>scalb(<i>x</i>, -ilogb(<i>x</i>))</code> is
* always in the range [1, 2).
* <p>
* Special cases:
* <ul>
* <li> If the argument is NaN, then the result is 2<sup>30</sup>.
* <li> If the argument is infinite, then the result is 2<sup>28</sup>.
* <li> If the argument is zero, then the result is -(2<sup>28</sup>).
* </ul>
*
* @param f floating-point number whose exponent is to be extracted
* @return unbiased exponent of the argument.
* @author Joseph D. Darcy
*/
public static int ilogb(float f) {
int exponent = getExponent(f);
switch (exponent) {
case FloatConsts.MAX_EXPONENT+1: // NaN or infinity
if( isNaN(f) )
return (1<<30); // 2^30
else // infinite value
return (1<<28); // 2^28
case FloatConsts.MIN_EXPONENT-1: // zero or subnormal
if(f == 0.0f) {
return -(1<<28); // -(2^28)
}
else {
int transducer = Float.floatToRawIntBits(f);
/*
* To avoid causing slow arithmetic on subnormals,
* the scaling to determine when f's significand
* is normalized is done in integer arithmetic.
* (there must be at least one "1" bit in the
* significand since zero has been screened out.
*/
// isolate significand bits
transducer &= FloatConsts.SIGNIF_BIT_MASK;
assert(transducer != 0);
// This loop is simple and functional. We might be
// able to do something more clever that was faster;
// e.g. number of leading zero detection on
// (transducer << (# exponent and sign bits).
while (transducer <
(1 << (FloatConsts.SIGNIFICAND_WIDTH - 1))) {
transducer *= 2;
exponent--;
}
exponent++;
assert( exponent >=
FloatConsts.MIN_EXPONENT - (FloatConsts.SIGNIFICAND_WIDTH-1) &&
exponent < FloatConsts.MIN_EXPONENT);
return exponent;
}
default:
assert( exponent >= FloatConsts.MIN_EXPONENT &&
exponent <= FloatConsts.MAX_EXPONENT);
return exponent;
}
}
/*
* The scalb operation should be reasonably fast; however, there
* are tradeoffs in writing a method to minimize the worst case
* performance and writing a method to minimize the time for
* expected common inputs. Some processors operate very slowly on
* subnormal operands, taking hundreds or thousands of cycles for
* one floating-point add or multiply as opposed to, say, four
* cycles for normal operands. For processors with very slow
* subnormal execution, scalb would be fastest if written entirely
* with integer operations; in other words, scalb would need to
* include the logic of performing correct rounding of subnormal
* values. This could be reasonably done in at most a few hundred
* cycles. However, this approach may penalize normal operations
* since at least the exponent of the floating-point argument must
* be examined.
*
* The approach taken in this implementation is a compromise.
* Floating-point multiplication is used to do most of the work;
* but knowingly multiplying by a subnormal scaling factor is
* avoided. However, the floating-point argument is not examined
* to see whether or not it is subnormal since subnormal inputs
* are assumed to be rare. At most three multiplies are needed to
* scale from the largest to smallest exponent ranges (scaling
* down, at most two multiplies are needed if subnormal scaling
* factors are allowed). However, in this implementation an
* expensive integer remainder operation is avoided at the cost of
* requiring five floating-point multiplies in the worst case,
* which should still be a performance win.
*
* If scaling of entire arrays is a concern, it would probably be
* more efficient to provide a double[] scalb(double[], int)
* version of scalb to avoid having to recompute the needed
* scaling factors for each floating-point value.
*/
/**
* Return {@code d} ×
* 2<sup>{@code scale_factor}</sup> rounded as if performed
* by a single correctly rounded floating-point multiply to a
* member of the double value set. See section 4.2.3 of
* <cite>The Java™ Language Specification</cite>
* for a discussion of floating-point
* value sets. If the exponent of the result is between the
* {@code double}'s minimum exponent and maximum exponent,
* the answer is calculated exactly. If the exponent of the
* result would be larger than {@code doubles}'s maximum
* exponent, an infinity is returned. Note that if the result is
* subnormal, precision may be lost; that is, when {@code scalb(x,
* n)} is subnormal, {@code scalb(scalb(x, n), -n)} may
* not equal <i>x</i>. When the result is non-NaN, the result has
* the same sign as {@code d}.
*
*<p>
* Special cases:
* <ul>
* <li> If the first argument is NaN, NaN is returned.
* <li> If the first argument is infinite, then an infinity of the
* same sign is returned.
* <li> If the first argument is zero, then a zero of the same
* sign is returned.
* </ul>
*
* @param d number to be scaled by a power of two.
* @param scale_factor power of 2 used to scale {@code d}
* @return {@code d * }2<sup>{@code scale_factor}</sup>
* @author Joseph D. Darcy
* @deprecated Use Math.scalb.
*/
@Deprecated
public static double scalb(double d, int scale_factor) {
return Math.scalb(d, scale_factor);
}
/**
* Return {@code f} ×
* 2<sup>{@code scale_factor}</sup> rounded as if performed
* by a single correctly rounded floating-point multiply to a
* member of the float value set. See section 4.2.3 of
* <cite>The Java™ Language Specification</cite>
* for a discussion of floating-point
* value sets. If the exponent of the result is between the
* {@code float}'s minimum exponent and maximum exponent, the
* answer is calculated exactly. If the exponent of the result
* would be larger than {@code float}'s maximum exponent, an
* infinity is returned. Note that if the result is subnormal,
* precision may be lost; that is, when {@code scalb(x, n)}
* is subnormal, {@code scalb(scalb(x, n), -n)} may not equal
* <i>x</i>. When the result is non-NaN, the result has the same
* sign as {@code f}.
*
*<p>
* Special cases:
* <ul>
* <li> If the first argument is NaN, NaN is returned.
* <li> If the first argument is infinite, then an infinity of the
* same sign is returned.
* <li> If the first argument is zero, then a zero of the same
* sign is returned.
* </ul>
*
* @param f number to be scaled by a power of two.
* @param scale_factor power of 2 used to scale {@code f}
* @return {@code f * }2<sup>{@code scale_factor}</sup>
* @author Joseph D. Darcy
* @deprecated Use Math.scalb.
*/
@Deprecated
public static float scalb(float f, int scale_factor) {
return Math.scalb(f, scale_factor);
}
/**
* Returns the floating-point number adjacent to the first
* argument in the direction of the second argument. If both
* arguments compare as equal the second argument is returned.
*
* <p>
* Special cases:
* <ul>
* <li> If either argument is a NaN, then NaN is returned.
*
* <li> If both arguments are signed zeros, {@code direction}
* is returned unchanged (as implied by the requirement of
* returning the second argument if the arguments compare as
* equal).
*
* <li> If {@code start} is
* ±{@code Double.MIN_VALUE} and {@code direction}
* has a value such that the result should have a smaller
* magnitude, then a zero with the same sign as {@code start}
* is returned.
*
* <li> If {@code start} is infinite and
* {@code direction} has a value such that the result should
* have a smaller magnitude, {@code Double.MAX_VALUE} with the
* same sign as {@code start} is returned.
*
* <li> If {@code start} is equal to ±
* {@code Double.MAX_VALUE} and {@code direction} has a
* value such that the result should have a larger magnitude, an
* infinity with same sign as {@code start} is returned.
* </ul>
*
* @param start starting floating-point value
* @param direction value indicating which of
* {@code start}'s neighbors or {@code start} should
* be returned
* @return The floating-point number adjacent to {@code start} in the
* direction of {@code direction}.
* @author Joseph D. Darcy
* @deprecated Use Math.nextAfter
*/
@Deprecated
public static double nextAfter(double start, double direction) {
return Math.nextAfter(start, direction);
}
/**
* Returns the floating-point number adjacent to the first
* argument in the direction of the second argument. If both
* arguments compare as equal, the second argument is returned.
*
* <p>
* Special cases:
* <ul>
* <li> If either argument is a NaN, then NaN is returned.
*
* <li> If both arguments are signed zeros, a {@code float}
* zero with the same sign as {@code direction} is returned
* (as implied by the requirement of returning the second argument
* if the arguments compare as equal).
*
* <li> If {@code start} is
* ±{@code Float.MIN_VALUE} and {@code direction}
* has a value such that the result should have a smaller
* magnitude, then a zero with the same sign as {@code start}
* is returned.
*
* <li> If {@code start} is infinite and
* {@code direction} has a value such that the result should
* have a smaller magnitude, {@code Float.MAX_VALUE} with the
* same sign as {@code start} is returned.
*
* <li> If {@code start} is equal to ±
* {@code Float.MAX_VALUE} and {@code direction} has a
* value such that the result should have a larger magnitude, an
* infinity with same sign as {@code start} is returned.
* </ul>
*
* @param start starting floating-point value
* @param direction value indicating which of
* {@code start}'s neighbors or {@code start} should
* be returned
* @return The floating-point number adjacent to {@code start} in the
* direction of {@code direction}.
* @author Joseph D. Darcy
* @deprecated Use Math.nextAfter.
*/
@Deprecated
public static float nextAfter(float start, double direction) {
return Math.nextAfter(start, direction);
}
/**
* Returns the floating-point value adjacent to {@code d} in
* the direction of positive infinity. This method is
* semantically equivalent to {@code nextAfter(d,
* Double.POSITIVE_INFINITY)}; however, a {@code nextUp}
* implementation may run faster than its equivalent
* {@code nextAfter} call.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, the result is NaN.
*
* <li> If the argument is positive infinity, the result is
* positive infinity.
*
* <li> If the argument is zero, the result is
* {@code Double.MIN_VALUE}
*
* </ul>
*
* @param d starting floating-point value
* @return The adjacent floating-point value closer to positive
* infinity.
* @author Joseph D. Darcy
* @deprecated use Math.nextUp.
*/
@Deprecated
public static double nextUp(double d) {
return Math.nextUp(d);
}
/**
* Returns the floating-point value adjacent to {@code f} in
* the direction of positive infinity. This method is
* semantically equivalent to {@code nextAfter(f,
* Double.POSITIVE_INFINITY)}; however, a {@code nextUp}
* implementation may run faster than its equivalent
* {@code nextAfter} call.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, the result is NaN.
*
* <li> If the argument is positive infinity, the result is
* positive infinity.
*
* <li> If the argument is zero, the result is
* {@code Float.MIN_VALUE}
*
* </ul>
*
* @param f starting floating-point value
* @return The adjacent floating-point value closer to positive
* infinity.
* @author Joseph D. Darcy
* @deprecated Use Math.nextUp.
*/
@Deprecated
public static float nextUp(float f) {
return Math.nextUp(f);
}
/**
* Returns the floating-point value adjacent to {@code d} in
* the direction of negative infinity. This method is
* semantically equivalent to {@code nextAfter(d,
* Double.NEGATIVE_INFINITY)}; however, a
* {@code nextDown} implementation may run faster than its
* equivalent {@code nextAfter} call.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, the result is NaN.
*
* <li> If the argument is negative infinity, the result is
* negative infinity.
*
* <li> If the argument is zero, the result is
* {@code -Double.MIN_VALUE}
*
* </ul>
*
* @param d starting floating-point value
* @return The adjacent floating-point value closer to negative
* infinity.
* @author Joseph D. Darcy
* @deprecated Use Math.nextDown.
*/
@Deprecated
public static double nextDown(double d) {
return Math.nextDown(d);
}
/**
* Returns the floating-point value adjacent to {@code f} in
* the direction of negative infinity. This method is
* semantically equivalent to {@code nextAfter(f,
* Float.NEGATIVE_INFINITY)}; however, a
* {@code nextDown} implementation may run faster than its
* equivalent {@code nextAfter} call.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, the result is NaN.
*
* <li> If the argument is negative infinity, the result is
* negative infinity.
*
* <li> If the argument is zero, the result is
* {@code -Float.MIN_VALUE}
*
* </ul>
*
* @param f starting floating-point value
* @return The adjacent floating-point value closer to negative
* infinity.
* @author Joseph D. Darcy
* @deprecated Use Math.nextDown.
*/
@Deprecated
public static double nextDown(float f) {
return Math.nextDown(f);
}
/**
* Returns the first floating-point argument with the sign of the
* second floating-point argument. For this method, a NaN
* {@code sign} argument is always treated as if it were
* positive.
*
* @param magnitude the parameter providing the magnitude of the result
* @param sign the parameter providing the sign of the result
* @return a value with the magnitude of {@code magnitude}
* and the sign of {@code sign}.
* @author Joseph D. Darcy
* @since 1.5
* @deprecated Use StrictMath.copySign.
*/
@Deprecated
public static double copySign(double magnitude, double sign) {
return StrictMath.copySign(magnitude, sign);
}
/**
* Returns the first floating-point argument with the sign of the
* second floating-point argument. For this method, a NaN
* {@code sign} argument is always treated as if it were
* positive.
*
* @param magnitude the parameter providing the magnitude of the result
* @param sign the parameter providing the sign of the result
* @return a value with the magnitude of {@code magnitude}
* and the sign of {@code sign}.
* @author Joseph D. Darcy
* @deprecated Use StrictMath.copySign.
*/
@Deprecated
public static float copySign(float magnitude, float sign) {
return StrictMath.copySign(magnitude, sign);
}
/**
* Returns the size of an ulp of the argument. An ulp of a
* {@code double} value is the positive distance between this
* floating-point value and the {@code double} value next
* larger in magnitude. Note that for non-NaN <i>x</i>,
* <code>ulp(-<i>x</i>) == ulp(<i>x</i>)</code>.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, then the result is NaN.
* <li> If the argument is positive or negative infinity, then the
* result is positive infinity.
* <li> If the argument is positive or negative zero, then the result is
* {@code Double.MIN_VALUE}.
* <li> If the argument is ±{@code Double.MAX_VALUE}, then
* the result is equal to 2<sup>971</sup>.
* </ul>
*
* @param d the floating-point value whose ulp is to be returned
* @return the size of an ulp of the argument
* @author Joseph D. Darcy
* @since 1.5
* @deprecated Use Math.ulp.
*/
@Deprecated
public static double ulp(double d) {
return Math.ulp(d);
}
/**
* Returns the size of an ulp of the argument. An ulp of a
* {@code float} value is the positive distance between this
* floating-point value and the {@code float} value next
* larger in magnitude. Note that for non-NaN <i>x</i>,
* <code>ulp(-<i>x</i>) == ulp(<i>x</i>)</code>.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, then the result is NaN.
* <li> If the argument is positive or negative infinity, then the
* result is positive infinity.
* <li> If the argument is positive or negative zero, then the result is
* {@code Float.MIN_VALUE}.
* <li> If the argument is ±{@code Float.MAX_VALUE}, then
* the result is equal to 2<sup>104</sup>.
* </ul>
*
* @param f the floating-point value whose ulp is to be returned
* @return the size of an ulp of the argument
* @author Joseph D. Darcy
* @since 1.5
* @deprecated Use Math.ulp.
*/
@Deprecated
public static float ulp(float f) {
return Math.ulp(f);
}
/**
* Returns the signum function of the argument; zero if the argument
* is zero, 1.0 if the argument is greater than zero, -1.0 if the
* argument is less than zero.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, then the result is NaN.
* <li> If the argument is positive zero or negative zero, then the
* result is the same as the argument.
* </ul>
*
* @param d the floating-point value whose signum is to be returned
* @return the signum function of the argument
* @author Joseph D. Darcy
* @since 1.5
* @deprecated Use Math.signum.
*/
@Deprecated
public static double signum(double d) {
return Math.signum(d);
}
/**
* Returns the signum function of the argument; zero if the argument
* is zero, 1.0f if the argument is greater than zero, -1.0f if the
* argument is less than zero.
*
* <p>Special Cases:
* <ul>
* <li> If the argument is NaN, then the result is NaN.
* <li> If the argument is positive zero or negative zero, then the
* result is the same as the argument.
* </ul>
*
* @param f the floating-point value whose signum is to be returned
* @return the signum function of the argument
* @author Joseph D. Darcy
* @since 1.5
* @deprecated Use Math.signum.
*/
@Deprecated
public static float signum(float f) {
return Math.signum(f);
}
}
|
googleapis/google-cloud-java | 35,724 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/RegionInstanceGroupsListInstancesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest}
*/
public final class RegionInstanceGroupsListInstancesRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest)
RegionInstanceGroupsListInstancesRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // Use RegionInstanceGroupsListInstancesRequest.newBuilder() to construct.
  private RegionInstanceGroupsListInstancesRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; initializes string
  // fields to "" so the accessors never observe null.
  private RegionInstanceGroupsListInstancesRequest() {
    instanceState_ = "";
    portName_ = "";
  }

  // Hook invoked reflectively by the protobuf runtime to allocate fresh
  // instances of this message type.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RegionInstanceGroupsListInstancesRequest();
  }
  // Protobuf descriptor for this message type, taken from the generated
  // Compute file descriptor.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_RegionInstanceGroupsListInstancesRequest_descriptor;
  }

  // Accessor table wiring reflective field access to this class and its
  // Builder; used by the protobuf runtime's reflection-based code paths.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_RegionInstanceGroupsListInstancesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.class,
            com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
   * </pre>
   *
   * Protobuf enum {@code
   * google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.InstanceState}
   */
  public enum InstanceState implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * A value indicating that the enum field is not set.
     * </pre>
     *
     * <code>UNDEFINED_INSTANCE_STATE = 0;</code>
     */
    UNDEFINED_INSTANCE_STATE(0),
    /**
     *
     *
     * <pre>
     * Matches any status of the instances, running, non-running and others.
     * </pre>
     *
     * <code>ALL = 64897;</code>
     */
    ALL(64897),
    /**
     *
     *
     * <pre>
     * Instance is in RUNNING state if it is running.
     * </pre>
     *
     * <code>RUNNING = 121282975;</code>
     */
    RUNNING(121282975),
    // Sentinel for wire values not known to this generated code version.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * A value indicating that the enum field is not set.
     * </pre>
     *
     * <code>UNDEFINED_INSTANCE_STATE = 0;</code>
     */
    public static final int UNDEFINED_INSTANCE_STATE_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Matches any status of the instances, running, non-running and others.
     * </pre>
     *
     * <code>ALL = 64897;</code>
     */
    public static final int ALL_VALUE = 64897;
    /**
     *
     *
     * <pre>
     * Instance is in RUNNING state if it is running.
     * </pre>
     *
     * <code>RUNNING = 121282975;</code>
     */
    public static final int RUNNING_VALUE = 121282975;

    // Numeric wire value of this entry; UNRECOGNIZED has no wire value and
    // throws instead.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static InstanceState valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static InstanceState forNumber(int value) {
      switch (value) {
        case 0:
          return UNDEFINED_INSTANCE_STATE;
        case 64897:
          return ALL;
        case 121282975:
          return RUNNING;
        default:
          // Unknown wire value; caller decides how to handle (runtime maps
          // this to UNRECOGNIZED).
          return null;
      }
    }

    // Lite-runtime lookup map used when descriptors are unavailable.
    public static com.google.protobuf.Internal.EnumLiteMap<InstanceState> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<InstanceState> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<InstanceState>() {
          public InstanceState findValueByNumber(int number) {
            return InstanceState.forNumber(number);
          }
        };

    // Descriptor of this specific enum value; not defined for UNRECOGNIZED.
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    // This enum is the first (index 0) nested enum of the enclosing message.
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    // Cached values() array to avoid re-cloning on every descriptor lookup.
    private static final InstanceState[] VALUES = values();

    public static InstanceState valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // Numeric wire value backing this entry.
    private final int value;

    private InstanceState(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.InstanceState)
  }
  // Presence bits for optional fields: 0x1 = instanceState, 0x2 = portName.
  private int bitField0_;
  public static final int INSTANCE_STATE_FIELD_NUMBER = 92223591;

  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below (standard generated-code pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object instanceState_ = "";
  /**
   *
   *
   * <pre>
   * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
   * Check the InstanceState enum for the list of possible values.
   * </pre>
   *
   * <code>optional string instance_state = 92223591;</code>
   *
   * @return Whether the instanceState field is set.
   */
  @java.lang.Override
  public boolean hasInstanceState() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
   * Check the InstanceState enum for the list of possible values.
   * </pre>
   *
   * <code>optional string instance_state = 92223591;</code>
   *
   * @return The instanceState.
   */
  @java.lang.Override
  public java.lang.String getInstanceState() {
    java.lang.Object ref = instanceState_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      instanceState_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
   * Check the InstanceState enum for the list of possible values.
   * </pre>
   *
   * <code>optional string instance_state = 92223591;</code>
   *
   * @return The bytes for instanceState.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getInstanceStateBytes() {
    java.lang.Object ref = instanceState_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the result.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      instanceState_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PORT_NAME_FIELD_NUMBER = 41534345;

  // Holds either a String or a ByteString; lazily converted and cached by the
  // accessors below (standard generated-code pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object portName_ = "";
  /**
   *
   *
   * <pre>
   * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
   * </pre>
   *
   * <code>optional string port_name = 41534345;</code>
   *
   * @return Whether the portName field is set.
   */
  @java.lang.Override
  public boolean hasPortName() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
   * </pre>
   *
   * <code>optional string port_name = 41534345;</code>
   *
   * @return The portName.
   */
  @java.lang.Override
  public java.lang.String getPortName() {
    java.lang.Object ref = portName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      portName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
   * </pre>
   *
   * <code>optional string port_name = 41534345;</code>
   *
   * @return The bytes for portName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPortNameBytes() {
    java.lang.Object ref = portName_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the result.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      portName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so initialization always succeeds;
  // the result is cached after the first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields whose presence bits are set, in ascending
  // field-number order (port_name = 41534345 before instance_state = 92223591),
  // followed by any unknown fields carried through from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000002) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 41534345, portName_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 92223591, instanceState_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size of the set fields plus unknown fields; the result
  // is memoized in memoizedSize (-1 marks "not yet computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(41534345, portName_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(92223591, instanceState_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality: presence flags must match, set values must be
  // equal, and unknown fields must match as well.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest other =
        (com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest) obj;

    if (hasInstanceState() != other.hasInstanceState()) return false;
    if (hasInstanceState()) {
      if (!getInstanceState().equals(other.getInstanceState())) return false;
    }
    if (hasPortName() != other.hasPortName()) return false;
    if (hasPortName()) {
      if (!getPortName().equals(other.getPortName())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash mixes the descriptor, each set field tagged by its field number, and
  // the unknown fields; memoized since messages are immutable (0 = unset).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasInstanceState()) {
      hash = (37 * hash) + INSTANCE_STATE_FIELD_NUMBER;
      hash = (53 * hash) + getInstanceState().hashCode();
    }
    if (hasPortName()) {
      hash = (37 * hash) + PORT_NAME_FIELD_NUMBER;
      hash = (53 * hash) + getPortName().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
  /** Parses a message from a CodedInputStream, resolving extensions via the given registry. */
  public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  /** Returns a fresh builder for this message type. */
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Creates a new builder seeded from the (empty) default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Creates a new builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(
      com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  /** Converts this message to a builder; avoids a merge when this is the default instance. */
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  /** Creates a builder attached to the given parent, for nested-builder change propagation. */
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Builder for the {@code RegionInstanceGroupsListInstancesRequest} message. Generated by
   * protoc; field presence is tracked via {@code bitField0_}.
   *
   * <pre>
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest)
      com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_RegionInstanceGroupsListInstancesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_RegionInstanceGroupsListInstancesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.class,
              com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.Builder.class);
    }

    // Construct using
    // com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets every field to its default and clears all presence bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      instanceState_ = "";
      portName_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_RegionInstanceGroupsListInstancesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
          .getDefaultInstance();
    }

    /** Builds the message; throws if required fields are missing (none here). */
    @java.lang.Override
    public com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest build() {
      com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest buildPartial() {
      com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest result =
          new com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set into the result message.
    private void buildPartial0(
        com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.instanceState_ = instanceState_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.portName_ = portName_;
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    /** Dynamic merge: dispatches to the typed overload when possible. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest) {
        return mergeFrom(
            (com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    /** Typed merge: copies only fields that are set on {@code other}. */
    public Builder mergeFrom(
        com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest other) {
      if (other
          == com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
              .getDefaultInstance()) return this;
      if (other.hasInstanceState()) {
        instanceState_ = other.instanceState_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasPortName()) {
        portName_ = other.portName_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Wire-format merge. Tags encode (field_number << 3) | wire_type:
     * 332274762 = port_name (41534345, length-delimited) and
     * 737788730 = instance_state (92223591, length-delimited).
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 332274762:
              {
                portName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 332274762
            case 737788730:
              {
                instanceState_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 737788730
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence bits: 0x1 = instanceState, 0x2 = portName.
    private int bitField0_;

    // Holds either a String or a lazily-decoded ByteString.
    private java.lang.Object instanceState_ = "";
    /**
     *
     *
     * <pre>
     * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
     * Check the InstanceState enum for the list of possible values.
     * </pre>
     *
     * <code>optional string instance_state = 92223591;</code>
     *
     * @return Whether the instanceState field is set.
     */
    public boolean hasInstanceState() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
     * Check the InstanceState enum for the list of possible values.
     * </pre>
     *
     * <code>optional string instance_state = 92223591;</code>
     *
     * @return The instanceState.
     */
    public java.lang.String getInstanceState() {
      java.lang.Object ref = instanceState_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        instanceState_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
     * Check the InstanceState enum for the list of possible values.
     * </pre>
     *
     * <code>optional string instance_state = 92223591;</code>
     *
     * @return The bytes for instanceState.
     */
    public com.google.protobuf.ByteString getInstanceStateBytes() {
      java.lang.Object ref = instanceState_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        instanceState_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
     * Check the InstanceState enum for the list of possible values.
     * </pre>
     *
     * <code>optional string instance_state = 92223591;</code>
     *
     * @param value The instanceState to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceState(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      instanceState_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
     * Check the InstanceState enum for the list of possible values.
     * </pre>
     *
     * <code>optional string instance_state = 92223591;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearInstanceState() {
      instanceState_ = getDefaultInstance().getInstanceState();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Instances in which state should be returned. Valid options are: 'ALL', 'RUNNING'. By default, it lists all instances.
     * Check the InstanceState enum for the list of possible values.
     * </pre>
     *
     * <code>optional string instance_state = 92223591;</code>
     *
     * @param value The bytes for instanceState to set.
     * @return This builder for chaining.
     */
    public Builder setInstanceStateBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      instanceState_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    // Holds either a String or a lazily-decoded ByteString.
    private java.lang.Object portName_ = "";
    /**
     *
     *
     * <pre>
     * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
     * </pre>
     *
     * <code>optional string port_name = 41534345;</code>
     *
     * @return Whether the portName field is set.
     */
    public boolean hasPortName() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
     * </pre>
     *
     * <code>optional string port_name = 41534345;</code>
     *
     * @return The portName.
     */
    public java.lang.String getPortName() {
      java.lang.Object ref = portName_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        portName_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
     * </pre>
     *
     * <code>optional string port_name = 41534345;</code>
     *
     * @return The bytes for portName.
     */
    public com.google.protobuf.ByteString getPortNameBytes() {
      java.lang.Object ref = portName_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        portName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
     * </pre>
     *
     * <code>optional string port_name = 41534345;</code>
     *
     * @param value The portName to set.
     * @return This builder for chaining.
     */
    public Builder setPortName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      portName_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
     * </pre>
     *
     * <code>optional string port_name = 41534345;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPortName() {
      portName_ = getDefaultInstance().getPortName();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Name of port user is interested in. It is optional. If it is set, only information about this ports will be returned. If it is not set, all the named ports will be returned. Always lists all instances.
     * </pre>
     *
     * <code>optional string port_name = 41534345;</code>
     *
     * @param value The bytes for portName to set.
     * @return This builder for chaining.
     */
    public Builder setPortNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      portName_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest)
  }
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest)
  // Singleton default (all-fields-unset) instance shared by all callers.
  private static final com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest();
  }
  /** Returns the shared immutable default instance. */
  public static com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; delegates to Builder.mergeFrom and attaches the partially
  // built message to any parse failure so callers can inspect what was read.
  private static final com.google.protobuf.Parser<RegionInstanceGroupsListInstancesRequest> PARSER =
      new com.google.protobuf.AbstractParser<RegionInstanceGroupsListInstancesRequest>() {
        @java.lang.Override
        public RegionInstanceGroupsListInstancesRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<RegionInstanceGroupsListInstancesRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<RegionInstanceGroupsListInstancesRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.compute.v1.RegionInstanceGroupsListInstancesRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/kafka | 36,005 | clients/src/test/java/org/apache/kafka/common/record/FileRecordsTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.record;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.compress.Compression;
import org.apache.kafka.common.header.Header;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.network.TransferableChannel;
import org.apache.kafka.test.TestUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.Mockito;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.IntStream;
import static java.util.Arrays.asList;
import static org.apache.kafka.test.TestUtils.tempFile;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class FileRecordsTest {
    // Sample payloads appended in setup(); each element becomes one record/batch in the log.
    private final byte[][] values = new byte[][] {
        "abcd".getBytes(),
        "efgh".getBytes(),
        "ijkl".getBytes()
    };
    // Log under test, backed by a temp file and rebuilt before every test method.
    private FileRecords fileRecords;
    // Recreates the backing file before each test so tests are isolated from one another.
    @BeforeEach
    public void setup() throws IOException {
        this.fileRecords = createFileRecords(values);
    }
    // Releases the underlying file channel after each test.
    @AfterEach
    public void cleanup() throws IOException {
        this.fileRecords.close();
    }
    // Appending to a log already at Integer.MAX_VALUE bytes must be rejected rather
    // than silently overflowing the int size counter.
    @Test
    public void testAppendProtectsFromOverflow() throws Exception {
        File fileMock = mock(File.class);
        FileChannel fileChannelMock = mock(FileChannel.class);
        when(fileChannelMock.size()).thenReturn((long) Integer.MAX_VALUE);

        FileRecords records = new FileRecords(fileMock, fileChannelMock, Integer.MAX_VALUE);
        assertThrows(IllegalArgumentException.class, () -> append(records, values));
    }
    // Opening a file whose channel reports a size beyond Integer.MAX_VALUE must fail,
    // since FileRecords tracks sizes as int.
    @Test
    public void testOpenOversizeFile() throws Exception {
        File fileMock = mock(File.class);
        FileChannel fileChannelMock = mock(FileChannel.class);
        when(fileChannelMock.size()).thenReturn(Integer.MAX_VALUE + 5L);

        assertThrows(KafkaException.class, () -> new FileRecords(fileMock, fileChannelMock, Integer.MAX_VALUE));
    }
@Test
public void testOutOfRangeSlice() {
assertThrows(IllegalArgumentException.class,
() -> this.fileRecords.slice(fileRecords.sizeInBytes() + 1, 15).sizeInBytes());
}
    /**
     * Test that the cached size variable matches the actual file size as we append messages
     */
    @Test
    public void testFileSize() throws IOException {
        assertEquals(fileRecords.channel().size(), fileRecords.sizeInBytes());
        for (int i = 0; i < 20; i++) {
            // after every append the cached size must still agree with the channel's size
            fileRecords.append(MemoryRecords.withRecords(Compression.NONE, new SimpleRecord("abcd".getBytes())));
            assertEquals(fileRecords.channel().size(), fileRecords.sizeInBytes());
        }
    }
/**
* Test that adding invalid bytes to the end of the log doesn't break iteration
*/
@Test
public void testIterationOverPartialAndTruncation() throws IOException {
testPartialWrite(0, fileRecords);
testPartialWrite(2, fileRecords);
testPartialWrite(4, fileRecords);
testPartialWrite(5, fileRecords);
testPartialWrite(6, fileRecords);
}
    // A reader slicing [0, sizeInBytes()) while a writer appends concurrently must
    // always observe a slice whose size matches the size it asked for — i.e. slices
    // are bounded by the size captured at creation, not the moving end of the file.
    @Test
    public void testSliceSizeLimitWithConcurrentWrite() throws Exception {
        FileRecords log = FileRecords.open(tempFile());
        ExecutorService executor = Executors.newFixedThreadPool(2);
        int maxSizeInBytes = 16384;

        try {
            // Reader: repeatedly snapshot the size, slice to it, and check the slice honors it.
            Future<Object> readerCompletion = executor.submit(() -> {
                while (log.sizeInBytes() < maxSizeInBytes) {
                    int currentSize = log.sizeInBytes();
                    Records slice = log.slice(0, currentSize);
                    assertEquals(currentSize, slice.sizeInBytes());
                }
                return null;
            });

            // Writer: keep appending until the log reaches the target size.
            Future<Object> writerCompletion = executor.submit(() -> {
                while (log.sizeInBytes() < maxSizeInBytes) {
                    append(log, values);
                }
                return null;
            });

            // get() propagates any assertion failure raised on the worker threads.
            writerCompletion.get();
            readerCompletion.get();
        } finally {
            executor.shutdownNow();
        }
    }
    /**
     * Writes {@code size} zero bytes at the channel's current position (simulating a
     * torn/partial write at the log's tail) and verifies iteration still yields
     * exactly the original records.
     */
    private void testPartialWrite(int size, FileRecords fileRecords) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(size);
        for (int i = 0; i < size; i++)
            buffer.put((byte) 0);
        buffer.rewind();

        fileRecords.channel().write(buffer);

        // appending those bytes should not change the contents
        Iterator<Record> records = fileRecords.records().iterator();
        for (byte[] value : values) {
            assertTrue(records.hasNext());
            assertEquals(records.next().value(), ByteBuffer.wrap(value));
        }
    }
/**
* Iterating over the file does file reads but shouldn't change the position of the underlying FileChannel.
*/
@Test
public void testIterationDoesntChangePosition() throws IOException {
long position = fileRecords.channel().position();
Iterator<Record> records = fileRecords.records().iterator();
for (byte[] value : values) {
assertTrue(records.hasNext());
assertEquals(records.next().value(), ByteBuffer.wrap(value));
}
assertEquals(position, fileRecords.channel().position());
}
    /**
     * Test a simple append and read.
     */
    @Test
    public void testRead() {
        // A slice spanning the whole file is equivalent to the file itself.
        FileRecords read = fileRecords.slice(0, fileRecords.sizeInBytes());
        assertEquals(fileRecords.sizeInBytes(), read.sizeInBytes());
        TestUtils.checkEquals(fileRecords.batches(), read.batches());

        List<RecordBatch> items = batches(read);
        RecordBatch first = items.get(0);

        // read from second message until the end
        read = fileRecords.slice(first.sizeInBytes(), fileRecords.sizeInBytes() - first.sizeInBytes());
        assertEquals(fileRecords.sizeInBytes() - first.sizeInBytes(), read.sizeInBytes());
        assertEquals(items.subList(1, items.size()), batches(read), "Read starting from the second message");

        // read from second message and size is past the end of the file
        read = fileRecords.slice(first.sizeInBytes(), fileRecords.sizeInBytes());
        assertEquals(fileRecords.sizeInBytes() - first.sizeInBytes(), read.sizeInBytes());
        assertEquals(items.subList(1, items.size()), batches(read), "Read starting from the second message");

        // read from second message and position + size overflows
        read = fileRecords.slice(first.sizeInBytes(), Integer.MAX_VALUE);
        assertEquals(fileRecords.sizeInBytes() - first.sizeInBytes(), read.sizeInBytes());
        assertEquals(items.subList(1, items.size()), batches(read), "Read starting from the second message");

        // read from second message and size is past the end of the file on a view/slice
        read = fileRecords.slice(1, fileRecords.sizeInBytes() - 1)
                .slice(first.sizeInBytes() - 1, fileRecords.sizeInBytes());
        assertEquals(fileRecords.sizeInBytes() - first.sizeInBytes(), read.sizeInBytes());
        assertEquals(items.subList(1, items.size()), batches(read), "Read starting from the second message");

        // read from second message and position + size overflows on a view/slice
        read = fileRecords.slice(1, fileRecords.sizeInBytes() - 1)
                .slice(first.sizeInBytes() - 1, Integer.MAX_VALUE);
        assertEquals(fileRecords.sizeInBytes() - first.sizeInBytes(), read.sizeInBytes());
        assertEquals(items.subList(1, items.size()), batches(read), "Read starting from the second message");

        // read a single message starting from second message
        RecordBatch second = items.get(1);
        read = fileRecords.slice(first.sizeInBytes(), second.sizeInBytes());
        assertEquals(second.sizeInBytes(), read.sizeInBytes());
        assertEquals(Collections.singletonList(second), batches(read), "Read a single message starting from the second message");
    }
    /**
     * Test the MessageSet.searchFor API.
     */
    @Test
    public void testSearch() throws IOException {
        // append a new message with a high offset
        SimpleRecord lastMessage = new SimpleRecord("test".getBytes());
        fileRecords.append(MemoryRecords.withRecords(50L, Compression.NONE, lastMessage));

        List<RecordBatch> batches = batches(fileRecords);
        int position = 0;

        // searching from position 0 for offset 0 should land on the first batch
        int message1Size = batches.get(0).sizeInBytes();
        assertEquals(new FileRecords.LogOffsetPosition(0L, position, message1Size),
            fileRecords.searchForOffsetFromPosition(0, 0),
            "Should be able to find the first message by its offset");

        position += message1Size;

        int message2Size = batches.get(1).sizeInBytes();
        assertEquals(new FileRecords.LogOffsetPosition(1L, position, message2Size),
            fileRecords.searchForOffsetFromPosition(1, 0),
            "Should be able to find second message when starting from 0");
        assertEquals(new FileRecords.LogOffsetPosition(1L, position, message2Size),
            fileRecords.searchForOffsetFromPosition(1, position),
            "Should be able to find second message starting from its offset");

        // skip over the third batch; offsets 3..50 all resolve to the high-offset batch
        position += message2Size + batches.get(2).sizeInBytes();

        int message4Size = batches.get(3).sizeInBytes();
        assertEquals(new FileRecords.LogOffsetPosition(50L, position, message4Size),
            fileRecords.searchForOffsetFromPosition(3, position),
            "Should be able to find fourth message from a non-existent offset");
        assertEquals(new FileRecords.LogOffsetPosition(50L, position, message4Size),
            fileRecords.searchForOffsetFromPosition(50, position),
            "Should be able to find fourth message by correct offset");
    }
/**
* Test that the message set iterator obeys start and end slicing
*/
@Test
public void testIteratorWithLimits() {
RecordBatch batch = batches(fileRecords).get(1);
int start = fileRecords.searchForOffsetFromPosition(1, 0).position;
int size = batch.sizeInBytes();
Records slice = fileRecords.slice(start, size);
assertEquals(Collections.singletonList(batch), batches(slice));
Records slice2 = fileRecords.slice(start, size - 1);
assertEquals(Collections.emptyList(), batches(slice2));
}
/**
* Test the truncateTo method lops off messages and appropriately updates the size
*/
@Test
public void testTruncate() throws IOException {
RecordBatch batch = batches(fileRecords).get(0);
int end = fileRecords.searchForOffsetFromPosition(1, 0).position;
fileRecords.truncateTo(end);
assertEquals(Collections.singletonList(batch), batches(fileRecords));
assertEquals(batch.sizeInBytes(), fileRecords.sizeInBytes());
}
/**
* Test that truncateTo only calls truncate on the FileChannel if the size of the
* FileChannel is bigger than the target size. This is important because some JVMs
* change the mtime of the file, even if truncate should do nothing.
*/
@Test
public void testTruncateNotCalledIfSizeIsSameAsTargetSize() throws IOException {
FileChannel channelMock = mock(FileChannel.class);
when(channelMock.size()).thenReturn(42L);
when(channelMock.position(42L)).thenReturn(null);
FileRecords fileRecords = new FileRecords(tempFile(), channelMock, Integer.MAX_VALUE);
fileRecords.truncateTo(42);
verify(channelMock, atLeastOnce()).size();
verify(channelMock, times(0)).truncate(anyLong());
}
/**
* Expect a KafkaException if targetSize is bigger than the size of
* the FileRecords.
*/
@Test
public void testTruncateNotCalledIfSizeIsBiggerThanTargetSize() throws IOException {
FileChannel channelMock = mock(FileChannel.class);
when(channelMock.size()).thenReturn(42L);
FileRecords fileRecords = new FileRecords(tempFile(), channelMock, Integer.MAX_VALUE);
try {
fileRecords.truncateTo(43);
fail("Should throw KafkaException");
} catch (KafkaException e) {
// expected
}
verify(channelMock, atLeastOnce()).size();
}
/**
* see #testTruncateNotCalledIfSizeIsSameAsTargetSize
*/
@Test
public void testTruncateIfSizeIsDifferentToTargetSize() throws IOException {
FileChannel channelMock = mock(FileChannel.class);
when(channelMock.size()).thenReturn(42L);
when(channelMock.truncate(anyLong())).thenReturn(channelMock);
FileRecords fileRecords = new FileRecords(tempFile(), channelMock, Integer.MAX_VALUE);
fileRecords.truncateTo(23);
verify(channelMock, atLeastOnce()).size();
verify(channelMock).truncate(23);
}
/**
* Test the new FileRecords with pre allocate as true
*/
@Test
public void testPreallocateTrue() throws IOException {
File temp = tempFile();
FileRecords fileRecords = FileRecords.open(temp, false, 1024 * 1024, true);
long position = fileRecords.channel().position();
int size = fileRecords.sizeInBytes();
assertEquals(0, position);
assertEquals(0, size);
assertEquals(1024 * 1024, temp.length());
}
/**
* Test the new FileRecords with pre allocate as false
*/
@Test
public void testPreallocateFalse() throws IOException {
File temp = tempFile();
FileRecords set = FileRecords.open(temp, false, 1024 * 1024, false);
long position = set.channel().position();
int size = set.sizeInBytes();
assertEquals(0, position);
assertEquals(0, size);
assertEquals(0, temp.length());
}
    /**
     * Test the new FileRecords with pre allocate as true and file has been clearly shut down, the file will be truncate to end of valid data.
     */
    @Test
    public void testPreallocateClearShutdown() throws IOException {
        File temp = tempFile();
        FileRecords fileRecords = FileRecords.open(temp, false, 1024 * 1024, true);
        append(fileRecords, values);

        // capture position/size before the clean close
        int oldPosition = (int) fileRecords.channel().position();
        int oldSize = fileRecords.sizeInBytes();
        assertEquals(this.fileRecords.sizeInBytes(), oldPosition);
        assertEquals(this.fileRecords.sizeInBytes(), oldSize);
        fileRecords.close();

        // reopen: the preallocated tail must have been trimmed to the valid data
        File tempReopen = new File(temp.getAbsolutePath());
        FileRecords setReopen = FileRecords.open(tempReopen, true, 1024 * 1024, true);
        int position = (int) setReopen.channel().position();
        int size = setReopen.sizeInBytes();

        assertEquals(oldPosition, position);
        assertEquals(oldPosition, size);
        assertEquals(oldPosition, tempReopen.length());
    }
    // Exercises timestamp search across every record (message format) version.
    @Test
    public void testSearchForTimestamp() throws IOException {
        for (RecordVersion version : RecordVersion.values()) {
            testSearchForTimestamp(version);
        }
    }
    /**
     * Test slice when already sliced file records have start position greater than available bytes
     * in the file records.
     */
    @Test
    public void testSliceForAlreadySlicedFileRecords() throws IOException {
        byte[][] values = new byte[][] {
            "abcd".getBytes(),
            "efgh".getBytes(),
            "ijkl".getBytes(),
            "mnopqr".getBytes(),
            "stuv".getBytes()
        };
        try (FileRecords fileRecords = createFileRecords(values)) {
            List<RecordBatch> items = batches(fileRecords.slice(0, fileRecords.sizeInBytes()));

            // Slice from fourth message until the end.
            int position = IntStream.range(0, 3).map(i -> items.get(i).sizeInBytes()).sum();
            Records sliced = fileRecords.slice(position, fileRecords.sizeInBytes() - position);
            assertEquals(fileRecords.sizeInBytes() - position, sliced.sizeInBytes());
            assertEquals(items.subList(3, items.size()), batches(sliced), "Read starting from the fourth message");

            // Further slice the already sliced file records, from fifth message until the end. Now the
            // bytes available in the sliced records are less than the moved position from original records.
            position = items.get(3).sizeInBytes();
            Records finalSliced = sliced.slice(position, sliced.sizeInBytes() - position);
            assertEquals(sliced.sizeInBytes() - position, finalSliced.sizeInBytes());
            assertEquals(items.subList(4, items.size()), batches(finalSliced), "Read starting from the fifth message");
        }
    }
private void testSearchForTimestamp(RecordVersion version) throws IOException {
File temp = tempFile();
FileRecords fileRecords = FileRecords.open(temp, false, 1024 * 1024, true);
appendWithOffsetAndTimestamp(fileRecords, version, 10L, 5, 0);
appendWithOffsetAndTimestamp(fileRecords, version, 11L, 6, 1);
assertFoundTimestamp(new FileRecords.TimestampAndOffset(10L, 5, Optional.of(0)),
fileRecords.searchForTimestamp(9L, 0, 0L), version);
assertFoundTimestamp(new FileRecords.TimestampAndOffset(10L, 5, Optional.of(0)),
fileRecords.searchForTimestamp(10L, 0, 0L), version);
assertFoundTimestamp(new FileRecords.TimestampAndOffset(11L, 6, Optional.of(1)),
fileRecords.searchForTimestamp(11L, 0, 0L), version);
assertNull(fileRecords.searchForTimestamp(12L, 0, 0L));
}
private void assertFoundTimestamp(FileRecords.TimestampAndOffset expected,
FileRecords.TimestampAndOffset actual,
RecordVersion version) {
if (version == RecordVersion.V0) {
assertNull(actual, "Expected no match for message format v0");
} else {
assertNotNull(actual, "Expected to find timestamp for message format " + version);
assertEquals(expected.timestamp, actual.timestamp, "Expected matching timestamps for message format" + version);
assertEquals(expected.offset, actual.offset, "Expected matching offsets for message format " + version);
Optional<Integer> expectedLeaderEpoch = version.value >= RecordVersion.V2.value ?
expected.leaderEpoch : Optional.empty();
assertEquals(expectedLeaderEpoch, actual.leaderEpoch, "Non-matching leader epoch for version " + version);
}
}
    /**
     * Builds a single-record batch with the given format version, base offset,
     * timestamp and leader epoch, and appends it to {@code fileRecords}.
     */
    private void appendWithOffsetAndTimestamp(FileRecords fileRecords,
                                              RecordVersion recordVersion,
                                              long timestamp,
                                              long offset,
                                              int leaderEpoch) throws IOException {
        ByteBuffer buffer = ByteBuffer.allocate(128);
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, recordVersion.value,
            Compression.NONE, TimestampType.CREATE_TIME, offset, timestamp, leaderEpoch);
        // Empty key/value: only the timestamp/offset metadata matters for these tests.
        builder.append(new SimpleRecord(timestamp, new byte[0], new byte[0]));
        fileRecords.append(builder.build());
    }
@Test
public void testConversion() throws IOException {
doTestConversion(Compression.NONE, RecordBatch.MAGIC_VALUE_V0);
doTestConversion(Compression.gzip().build(), RecordBatch.MAGIC_VALUE_V0);
doTestConversion(Compression.NONE, RecordBatch.MAGIC_VALUE_V1);
doTestConversion(Compression.gzip().build(), RecordBatch.MAGIC_VALUE_V1);
doTestConversion(Compression.NONE, RecordBatch.MAGIC_VALUE_V2);
doTestConversion(Compression.gzip().build(), RecordBatch.MAGIC_VALUE_V2);
}
    /**
     * writeTo must clamp the requested length to the bytes remaining after the
     * start position, and pass the clamped count to the destination channel.
     */
    @Test
    public void testBytesLengthOfWriteTo() throws IOException {
        int size = fileRecords.sizeInBytes();
        int firstWritten = size / 3;
        TransferableChannel channel = Mockito.mock(TransferableChannel.class);
        // Firstly we wrote some of the data
        fileRecords.writeTo(channel, 0, firstWritten);
        verify(channel).transferFrom(any(), anyLong(), eq((long) firstWritten));
        // Ensure (length > size - firstWritten)
        int secondWrittenLength = size - firstWritten + 1;
        fileRecords.writeTo(channel, firstWritten, secondWrittenLength);
        // But we still only write (size - firstWritten), which is not fulfilled in the old version.
        // Note: the (long) cast binds to `size`, so the subtraction is done in long arithmetic.
        verify(channel).transferFrom(any(), anyLong(), eq((long) size - firstWritten));
    }
    /**
     * Test two conditions:
     * 1. If the target offset equals the base offset of the first batch
     * 2. If the target offset is less than the base offset of the first batch
     * <p>
     * If the base offset of the first batch is equal to or greater than the target offset, it should return the
     * position of the first batch and the lastOffset method should not be called.
     */
    @ParameterizedTest
    @ValueSource(longs = {5, 10})
    public void testSearchForOffsetFromPosition1(long baseOffset) throws IOException {
        File mockFile = mock(File.class);
        FileChannel mockChannel = mock(FileChannel.class);
        FileLogInputStream.FileChannelRecordBatch batch = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(batch.baseOffset()).thenReturn(baseOffset);
        FileRecords fileRecords = Mockito.spy(new FileRecords(mockFile, mockChannel, 100));
        mockFileRecordBatches(fileRecords, batch);
        // Target offset is 5; the parameterized base offset (5 or 10) is always >= target,
        // so the very first batch is returned without ever reading its last offset.
        FileRecords.LogOffsetPosition result = fileRecords.searchForOffsetFromPosition(5L, 0);
        assertEquals(FileRecords.LogOffsetPosition.fromBatch(batch), result);
        verify(batch, never()).lastOffset();
    }
/**
* Test the case when the target offset equals the last offset of the first batch.
*/
@Test
public void testSearchForOffsetFromPosition2() throws IOException {
File mockFile = mock(File.class);
FileChannel mockChannel = mock(FileChannel.class);
FileLogInputStream.FileChannelRecordBatch batch = mock(FileLogInputStream.FileChannelRecordBatch.class);
when(batch.baseOffset()).thenReturn(3L);
when(batch.lastOffset()).thenReturn(5L);
FileRecords fileRecords = Mockito.spy(new FileRecords(mockFile, mockChannel, 100));
mockFileRecordBatches(fileRecords, batch);
FileRecords.LogOffsetPosition result = fileRecords.searchForOffsetFromPosition(5L, 0);
assertEquals(FileRecords.LogOffsetPosition.fromBatch(batch), result);
// target is equal to the last offset of the batch, we should call lastOffset
verify(batch, times(1)).lastOffset();
}
    /**
     * Test the case when the target offset equals the last offset of the last batch.
     */
    @Test
    public void testSearchForOffsetFromPosition3() throws IOException {
        File mockFile = mock(File.class);
        FileChannel mockChannel = mock(FileChannel.class);
        // First batch covers offsets 5..12, second batch 15..20; target is 20.
        FileLogInputStream.FileChannelRecordBatch prevBatch = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(prevBatch.baseOffset()).thenReturn(5L);
        when(prevBatch.lastOffset()).thenReturn(12L);
        FileLogInputStream.FileChannelRecordBatch currentBatch = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(currentBatch.baseOffset()).thenReturn(15L);
        when(currentBatch.lastOffset()).thenReturn(20L);
        FileRecords fileRecords = Mockito.spy(new FileRecords(mockFile, mockChannel, 100));
        mockFileRecordBatches(fileRecords, prevBatch, currentBatch);
        FileRecords.LogOffsetPosition result = fileRecords.searchForOffsetFromPosition(20L, 0);
        assertEquals(FileRecords.LogOffsetPosition.fromBatch(currentBatch), result);
        // Because the target offset is in the current batch, we should not call lastOffset in the previous batch
        verify(prevBatch, never()).lastOffset();
        verify(currentBatch, times(1)).lastOffset();
    }
    /**
     * Test the case when the target offset is within the range of the previous batch.
     */
    @Test
    public void testSearchForOffsetFromPosition4() throws IOException {
        File mockFile = mock(File.class);
        FileChannel mockChannel = mock(FileChannel.class);
        FileLogInputStream.FileChannelRecordBatch prevBatch = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(prevBatch.baseOffset()).thenReturn(5L);
        when(prevBatch.lastOffset()).thenReturn(12L); // > targetOffset
        FileLogInputStream.FileChannelRecordBatch currentBatch = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(currentBatch.baseOffset()).thenReturn(15L); // >= targetOffset
        FileRecords fileRecords = Mockito.spy(new FileRecords(mockFile, mockChannel, 100));
        mockFileRecordBatches(fileRecords, prevBatch, currentBatch);
        FileRecords.LogOffsetPosition result = fileRecords.searchForOffsetFromPosition(10L, 0);
        assertEquals(FileRecords.LogOffsetPosition.fromBatch(prevBatch), result);
        // The target offset (10) falls inside the PREVIOUS batch (5..12), so the
        // search must read prevBatch.lastOffset() to decide.
        verify(prevBatch, times(1)).lastOffset();
    }
    /**
     * Test the case when no batch matches the target offset.
     */
    @Test
    public void testSearchForOffsetFromPosition5() throws IOException {
        File mockFile = mock(File.class);
        FileChannel mockChannel = mock(FileChannel.class);
        FileLogInputStream.FileChannelRecordBatch batch1 = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(batch1.baseOffset()).thenReturn(5L); // < targetOffset
        FileLogInputStream.FileChannelRecordBatch batch2 = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(batch2.baseOffset()).thenReturn(8L); // < targetOffset
        when(batch2.lastOffset()).thenReturn(9L); // < targetOffset
        FileRecords fileRecords = Mockito.spy(new FileRecords(mockFile, mockChannel, 100));
        mockFileRecordBatches(fileRecords, batch1, batch2);
        FileRecords.LogOffsetPosition result = fileRecords.searchForOffsetFromPosition(10L, 0);
        assertNull(result);
        // batch2's last offset (9) is still below the target (10): lastOffset had to be
        // consulted on the final batch, and the search falls off the end with no match.
        verify(batch1, never()).lastOffset();
        verify(batch2, times(1)).lastOffset();
    }
    /**
     * Test two conditions:
     * 1. If the target offset is less than the base offset of the last batch
     * 2. If the target offset equals the base offset of the last batch
     */
    @ParameterizedTest
    @ValueSource(longs = {8, 10})
    public void testSearchForOffsetFromPosition6(long baseOffset) throws IOException {
        File mockFile = mock(File.class);
        FileChannel mockChannel = mock(FileChannel.class);
        FileLogInputStream.FileChannelRecordBatch batch1 = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(batch1.baseOffset()).thenReturn(5L); // < targetOffset
        FileLogInputStream.FileChannelRecordBatch batch2 = mock(FileLogInputStream.FileChannelRecordBatch.class);
        when(batch2.baseOffset()).thenReturn(baseOffset); // < targetOffset or == targetOffset
        when(batch2.lastOffset()).thenReturn(12L); // >= targetOffset
        FileRecords fileRecords = Mockito.spy(new FileRecords(mockFile, mockChannel, 100));
        mockFileRecordBatches(fileRecords, batch1, batch2);
        // The target is fixed at 10; which branch below runs depends only on the
        // parameterized base offset of batch2 (8 -> inside batch2, 10 -> exact base match).
        long targetOffset = 10L;
        FileRecords.LogOffsetPosition result = fileRecords.searchForOffsetFromPosition(targetOffset, 0);
        assertEquals(FileRecords.LogOffsetPosition.fromBatch(batch2), result);
        if (targetOffset == baseOffset) {
            // Because the target offset is equal to the base offset of the batch2, we should not call
            // lastOffset on batch2 and batch1
            verify(batch1, never()).lastOffset();
            verify(batch2, never()).lastOffset();
        } else {
            // Because the target offset is in the batch2, we should not call
            // lastOffset on batch1
            verify(batch1, never()).lastOffset();
            verify(batch2, times(1)).lastOffset();
        }
    }
/**
* Test the case when the target offset is between two batches.
*/
@Test
public void testSearchForOffsetFromPosition7() throws IOException {
File mockFile = mock(File.class);
FileChannel mockChannel = mock(FileChannel.class);
FileLogInputStream.FileChannelRecordBatch batch1 = mock(FileLogInputStream.FileChannelRecordBatch.class);
when(batch1.baseOffset()).thenReturn(5L);
when(batch1.lastOffset()).thenReturn(10L);
FileLogInputStream.FileChannelRecordBatch batch2 = mock(FileLogInputStream.FileChannelRecordBatch.class);
when(batch2.baseOffset()).thenReturn(15L);
when(batch2.lastOffset()).thenReturn(20L);
FileRecords fileRecords = Mockito.spy(new FileRecords(mockFile, mockChannel, 100));
mockFileRecordBatches(fileRecords, batch1, batch2);
FileRecords.LogOffsetPosition result = fileRecords.searchForOffsetFromPosition(13L, 0);
assertEquals(FileRecords.LogOffsetPosition.fromBatch(batch2), result);
// Because the target offset is between the two batches, we should call lastOffset on the batch1
verify(batch1, times(1)).lastOffset();
verify(batch2, never()).lastOffset();
}
private void mockFileRecordBatches(FileRecords fileRecords, FileLogInputStream.FileChannelRecordBatch... batch) {
List<FileLogInputStream.FileChannelRecordBatch> batches = asList(batch);
doReturn((Iterable<FileLogInputStream.FileChannelRecordBatch>) batches::iterator)
.when(fileRecords)
.batchesFrom(anyInt());
}
    /**
     * Writes three batches (v0, v1, v2 magic) to a file and prepares the expected
     * offsets/records remaining after down-conversion to {@code toMagic}.
     *
     * NOTE(review): {@code filteredOffsets}/{@code filteredRecords} are computed but
     * never asserted against anything — the down-conversion verification appears to
     * be missing or truncated. TODO: confirm against the upstream version of this test.
     */
    private void doTestConversion(Compression compression, byte toMagic) throws IOException {
        List<Long> offsets = asList(0L, 2L, 3L, 9L, 11L, 15L, 16L, 17L, 22L, 24L);
        Header[] headers = {new RecordHeader("headerKey1", "headerValue1".getBytes()),
            new RecordHeader("headerKey2", "headerValue2".getBytes()),
            new RecordHeader("headerKey3", "headerValue3".getBytes())};
        List<SimpleRecord> records = asList(
            new SimpleRecord(1L, "k1".getBytes(), "hello".getBytes()),
            new SimpleRecord(2L, "k2".getBytes(), "goodbye".getBytes()),
            new SimpleRecord(3L, "k3".getBytes(), "hello again".getBytes()),
            new SimpleRecord(4L, "k4".getBytes(), "goodbye for now".getBytes()),
            new SimpleRecord(5L, "k5".getBytes(), "hello again".getBytes()),
            new SimpleRecord(6L, "k6".getBytes(), "I sense indecision".getBytes()),
            new SimpleRecord(7L, "k7".getBytes(), "what now".getBytes()),
            new SimpleRecord(8L, "k8".getBytes(), "running out".getBytes(), headers),
            new SimpleRecord(9L, "k9".getBytes(), "ok, almost done".getBytes()),
            new SimpleRecord(10L, "k10".getBytes(), "finally".getBytes(), headers));
        assertEquals(offsets.size(), records.size(), "incorrect test setup");
        ByteBuffer buffer = ByteBuffer.allocate(1024);
        // Records 0-2 go into a v0 batch, 3-5 into v1, 6-9 into v2.
        MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V0, compression,
            TimestampType.CREATE_TIME, 0L);
        for (int i = 0; i < 3; i++)
            builder.appendWithOffset(offsets.get(i), records.get(i));
        builder.close();
        builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V1, compression, TimestampType.CREATE_TIME,
            0L);
        for (int i = 3; i < 6; i++)
            builder.appendWithOffset(offsets.get(i), records.get(i));
        builder.close();
        builder = MemoryRecords.builder(buffer, RecordBatch.MAGIC_VALUE_V2, compression, TimestampType.CREATE_TIME, 0L);
        for (int i = 6; i < 10; i++)
            builder.appendWithOffset(offsets.get(i), records.get(i));
        builder.close();
        buffer.flip();
        try (FileRecords fileRecords = FileRecords.open(tempFile())) {
            fileRecords.append(MemoryRecords.readableRecords(buffer));
            fileRecords.flush();
            if (toMagic <= RecordBatch.MAGIC_VALUE_V1 && compression.type() == CompressionType.NONE) {
                // Uncompressed down-conversion to v0/v1 drops one record:
                // the first record of a newer-format batch loses its batch boundary.
                long firstOffset;
                if (toMagic == RecordBatch.MAGIC_VALUE_V0)
                    firstOffset = 11L; // v1 record
                else
                    firstOffset = 17; // v2 record
                List<Long> filteredOffsets = new ArrayList<>(offsets);
                List<SimpleRecord> filteredRecords = new ArrayList<>(records);
                int index = filteredOffsets.indexOf(firstOffset) - 1;
                filteredRecords.remove(index);
                filteredOffsets.remove(index);
            }
        }
    }
    /** Materializes every batch in {@code buffer} into a list for easy comparison. */
    private static List<RecordBatch> batches(Records buffer) {
        return TestUtils.toList(buffer.batches());
    }
    /**
     * Opens a fresh temp-file-backed FileRecords and appends one single-record
     * batch per value. Caller is responsible for closing the returned instance.
     */
    private FileRecords createFileRecords(byte[][] values) throws IOException {
        FileRecords fileRecords = FileRecords.open(tempFile());
        append(fileRecords, values);
        return fileRecords;
    }
private void append(FileRecords fileRecords, byte[][] values) throws IOException {
long offset = 0L;
for (byte[] value : values) {
ByteBuffer buffer = ByteBuffer.allocate(128);
MemoryRecordsBuilder builder = MemoryRecords.builder(buffer, RecordBatch.CURRENT_MAGIC_VALUE,
Compression.NONE, TimestampType.CREATE_TIME, offset);
builder.appendWithOffset(offset++, System.currentTimeMillis(), null, value);
fileRecords.append(builder.build());
}
fileRecords.flush();
}
}
|
googleapis/google-cloud-java | 35,607 | java-recaptchaenterprise/proto-google-cloud-recaptchaenterprise-v1/src/main/java/com/google/recaptchaenterprise/v1/WafSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/recaptchaenterprise/v1/recaptchaenterprise.proto
// Protobuf Java Version: 3.25.8
package com.google.recaptchaenterprise.v1;
/**
*
*
* <pre>
* Settings specific to keys that can be used for WAF (Web Application
* Firewall).
* </pre>
*
* Protobuf type {@code google.cloud.recaptchaenterprise.v1.WafSettings}
*/
public final class WafSettings extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.recaptchaenterprise.v1.WafSettings)
WafSettingsOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use WafSettings.newBuilder() to construct.
  private WafSettings(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance: both enum fields start
  // at 0, i.e. the *_UNSPECIFIED values.
  private WafSettings() {
    wafService_ = 0;
    wafFeature_ = 0;
  }
  // Protobuf-runtime hook for creating fresh instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new WafSettings();
  }
  // Message descriptor, generated from recaptchaenterprise.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_WafSettings_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
        .internal_static_google_cloud_recaptchaenterprise_v1_WafSettings_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.recaptchaenterprise.v1.WafSettings.class,
            com.google.recaptchaenterprise.v1.WafSettings.Builder.class);
  }
/**
*
*
* <pre>
* Supported WAF features. For more information, see
* https://cloud.google.com/recaptcha/docs/usecase#comparison_of_features.
* </pre>
*
* Protobuf enum {@code google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature}
*/
public enum WafFeature implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Undefined feature.
* </pre>
*
* <code>WAF_FEATURE_UNSPECIFIED = 0;</code>
*/
WAF_FEATURE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Redirects suspicious traffic to reCAPTCHA.
* </pre>
*
* <code>CHALLENGE_PAGE = 1;</code>
*/
CHALLENGE_PAGE(1),
/**
*
*
* <pre>
* Use reCAPTCHA session-tokens to protect the whole user session on the
* site's domain.
* </pre>
*
* <code>SESSION_TOKEN = 2;</code>
*/
SESSION_TOKEN(2),
/**
*
*
* <pre>
* Use reCAPTCHA action-tokens to protect user actions.
* </pre>
*
* <code>ACTION_TOKEN = 3;</code>
*/
ACTION_TOKEN(3),
/**
*
*
* <pre>
* Use reCAPTCHA WAF express protection to protect any content other than
* web pages, like APIs and IoT devices.
* </pre>
*
* <code>EXPRESS = 5;</code>
*/
EXPRESS(5),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Undefined feature.
* </pre>
*
* <code>WAF_FEATURE_UNSPECIFIED = 0;</code>
*/
public static final int WAF_FEATURE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Redirects suspicious traffic to reCAPTCHA.
* </pre>
*
* <code>CHALLENGE_PAGE = 1;</code>
*/
public static final int CHALLENGE_PAGE_VALUE = 1;
/**
*
*
* <pre>
* Use reCAPTCHA session-tokens to protect the whole user session on the
* site's domain.
* </pre>
*
* <code>SESSION_TOKEN = 2;</code>
*/
public static final int SESSION_TOKEN_VALUE = 2;
/**
*
*
* <pre>
* Use reCAPTCHA action-tokens to protect user actions.
* </pre>
*
* <code>ACTION_TOKEN = 3;</code>
*/
public static final int ACTION_TOKEN_VALUE = 3;
/**
*
*
* <pre>
* Use reCAPTCHA WAF express protection to protect any content other than
* web pages, like APIs and IoT devices.
* </pre>
*
* <code>EXPRESS = 5;</code>
*/
public static final int EXPRESS_VALUE = 5;
    // Returns the numeric wire value; UNRECOGNIZED carries no number by definition.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static WafFeature valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
     */
    public static WafFeature forNumber(int value) {
      switch (value) {
        case 0:
          return WAF_FEATURE_UNSPECIFIED;
        case 1:
          return CHALLENGE_PAGE;
        case 2:
          return SESSION_TOKEN;
        case 3:
          return ACTION_TOKEN;
        case 5:
          return EXPRESS;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<WafFeature> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<WafFeature> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<WafFeature>() {
          public WafFeature findValueByNumber(int number) {
            return WafFeature.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      // WafFeature is the first nested enum type (index 0) of WafSettings.
      return com.google.recaptchaenterprise.v1.WafSettings.getDescriptor().getEnumTypes().get(0);
    }
    // Cached copy of values(): values() clones the backing array on every call.
    private static final WafFeature[] VALUES = values();
    public static WafFeature valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private WafFeature(int value) {
      this.value = value;
    }
// @@protoc_insertion_point(enum_scope:google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature)
}
/**
*
*
* <pre>
* Web Application Firewalls supported by reCAPTCHA.
* </pre>
*
* Protobuf enum {@code google.cloud.recaptchaenterprise.v1.WafSettings.WafService}
*/
public enum WafService implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Undefined WAF
* </pre>
*
* <code>WAF_SERVICE_UNSPECIFIED = 0;</code>
*/
WAF_SERVICE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Cloud Armor
* </pre>
*
* <code>CA = 1;</code>
*/
CA(1),
/**
*
*
* <pre>
* Fastly
* </pre>
*
* <code>FASTLY = 3;</code>
*/
FASTLY(3),
/**
*
*
* <pre>
* Cloudflare
* </pre>
*
* <code>CLOUDFLARE = 4;</code>
*/
CLOUDFLARE(4),
/**
*
*
* <pre>
* Akamai
* </pre>
*
* <code>AKAMAI = 5;</code>
*/
AKAMAI(5),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Undefined WAF
* </pre>
*
* <code>WAF_SERVICE_UNSPECIFIED = 0;</code>
*/
public static final int WAF_SERVICE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Cloud Armor
* </pre>
*
* <code>CA = 1;</code>
*/
public static final int CA_VALUE = 1;
/**
*
*
* <pre>
* Fastly
* </pre>
*
* <code>FASTLY = 3;</code>
*/
public static final int FASTLY_VALUE = 3;
/**
*
*
* <pre>
* Cloudflare
* </pre>
*
* <code>CLOUDFLARE = 4;</code>
*/
public static final int CLOUDFLARE_VALUE = 4;
/**
*
*
* <pre>
* Akamai
* </pre>
*
* <code>AKAMAI = 5;</code>
*/
public static final int AKAMAI_VALUE = 5;
    // Returns the numeric wire value; UNRECOGNIZED carries no number by definition.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static WafService valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or {@code null} if unknown.
     */
    public static WafService forNumber(int value) {
      switch (value) {
        case 0:
          return WAF_SERVICE_UNSPECIFIED;
        case 1:
          return CA;
        case 3:
          return FASTLY;
        case 4:
          return CLOUDFLARE;
        case 5:
          return AKAMAI;
        default:
          return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<WafService> internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<WafService> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<WafService>() {
          public WafService findValueByNumber(int number) {
            return WafService.forNumber(number);
          }
        };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      // WafService is the second nested enum type (index 1) of WafSettings.
      return com.google.recaptchaenterprise.v1.WafSettings.getDescriptor().getEnumTypes().get(1);
    }
    // Cached copy of values(): values() clones the backing array on every call.
    private static final WafService[] VALUES = values();
    public static WafService valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private WafService(int value) {
      this.value = value;
    }
// @@protoc_insertion_point(enum_scope:google.cloud.recaptchaenterprise.v1.WafSettings.WafService)
}
  public static final int WAF_SERVICE_FIELD_NUMBER = 1;
  // Stored as the raw wire value; 0 == WAF_SERVICE_UNSPECIFIED.
  private int wafService_ = 0;
  /**
   *
   *
   * <pre>
   * Required. The WAF service that uses this key.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.WafSettings.WafService waf_service = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The enum numeric value on the wire for wafService.
   */
  @java.lang.Override
  public int getWafServiceValue() {
    return wafService_;
  }
  /**
   *
   *
   * <pre>
   * Required. The WAF service that uses this key.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.WafSettings.WafService waf_service = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The wafService.
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.WafSettings.WafService getWafService() {
    com.google.recaptchaenterprise.v1.WafSettings.WafService result =
        com.google.recaptchaenterprise.v1.WafSettings.WafService.forNumber(wafService_);
    // Unknown wire values map to UNRECOGNIZED rather than null.
    return result == null
        ? com.google.recaptchaenterprise.v1.WafSettings.WafService.UNRECOGNIZED
        : result;
  }
  public static final int WAF_FEATURE_FIELD_NUMBER = 2;
  // Stored as the raw wire value; 0 == WAF_FEATURE_UNSPECIFIED.
  private int wafFeature_ = 0;
  /**
   *
   *
   * <pre>
   * Required. The WAF feature for which this key is enabled.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature waf_feature = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The enum numeric value on the wire for wafFeature.
   */
  @java.lang.Override
  public int getWafFeatureValue() {
    return wafFeature_;
  }
  /**
   *
   *
   * <pre>
   * Required. The WAF feature for which this key is enabled.
   * </pre>
   *
   * <code>
   * .google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature waf_feature = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The wafFeature.
   */
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.WafSettings.WafFeature getWafFeature() {
    com.google.recaptchaenterprise.v1.WafSettings.WafFeature result =
        com.google.recaptchaenterprise.v1.WafSettings.WafFeature.forNumber(wafFeature_);
    // Unknown wire values map to UNRECOGNIZED rather than null.
    return result == null
        ? com.google.recaptchaenterprise.v1.WafSettings.WafFeature.UNRECOGNIZED
        : result;
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2 fields, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 semantics: enum fields are only serialized when set to a
    // non-default (non-zero) value.
    if (wafService_
        != com.google.recaptchaenterprise.v1.WafSettings.WafService.WAF_SERVICE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, wafService_);
    }
    if (wafFeature_
        != com.google.recaptchaenterprise.v1.WafSettings.WafFeature.WAF_FEATURE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(2, wafFeature_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is memoized; -1 means "not computed yet".
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Enum fields contribute only when non-default, mirroring writeTo().
    if (wafService_
        != com.google.recaptchaenterprise.v1.WafSettings.WafService.WAF_SERVICE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, wafService_);
    }
    if (wafFeature_
        != com.google.recaptchaenterprise.v1.WafSettings.WafFeature.WAF_FEATURE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, wafFeature_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.recaptchaenterprise.v1.WafSettings)) {
      return super.equals(obj);
    }
    com.google.recaptchaenterprise.v1.WafSettings other =
        (com.google.recaptchaenterprise.v1.WafSettings) obj;
    // Field-by-field comparison on the raw enum wire values, plus unknown fields.
    if (wafService_ != other.wafService_) return false;
    if (wafFeature_ != other.wafFeature_) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 is the sentinel for "not computed yet".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf-generated hash: field numbers and values mixed in with
    // prime multipliers (37/53), seeded with the descriptor hash.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + WAF_SERVICE_FIELD_NUMBER;
    hash = (53 * hash) + wafService_;
    hash = (37 * hash) + WAF_FEATURE_FIELD_NUMBER;
    hash = (53 * hash) + wafFeature_;
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
  // CodedInputStream variant that also resolves extensions via the given registry.
  public static com.google.recaptchaenterprise.v1.WafSettings parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Instance hook required by the Message interface; delegates to the static factory.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a fresh Builder seeded from the default (all-fields-unset) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a Builder pre-populated with the fields of the given prototype message.
  public static Builder newBuilder(com.google.recaptchaenterprise.v1.WafSettings prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  // For the default instance an empty Builder suffices; otherwise copy this message's fields in.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  // Framework hook: builds a Builder wired to a parent for nested-builder change notification.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Settings specific to keys that can be used for WAF (Web Application
   * Firewall).
   * </pre>
   *
   * Protobuf type {@code google.cloud.recaptchaenterprise.v1.WafSettings}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.recaptchaenterprise.v1.WafSettings)
      com.google.recaptchaenterprise.v1.WafSettingsOrBuilder {
    // Message-type descriptor, shared with the outer WafSettings class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_WafSettings_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_WafSettings_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.recaptchaenterprise.v1.WafSettings.class,
              com.google.recaptchaenterprise.v1.WafSettings.Builder.class);
    }
    // Construct using com.google.recaptchaenterprise.v1.WafSettings.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both enum fields to their default wire value (0) and clears the has-bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      wafService_ = 0;
      wafFeature_ = 0;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.recaptchaenterprise.v1.RecaptchaEnterpriseProto
          .internal_static_google_cloud_recaptchaenterprise_v1_WafSettings_descriptor;
    }
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.WafSettings getDefaultInstanceForType() {
      return com.google.recaptchaenterprise.v1.WafSettings.getDefaultInstance();
    }
    // Builds and verifies required-field initialization (always true for proto3 messages).
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.WafSettings build() {
      com.google.recaptchaenterprise.v1.WafSettings result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.WafSettings buildPartial() {
      com.google.recaptchaenterprise.v1.WafSettings result =
          new com.google.recaptchaenterprise.v1.WafSettings(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose has-bits are set into the freshly constructed message.
    private void buildPartial0(com.google.recaptchaenterprise.v1.WafSettings result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.wafService_ = wafService_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.wafFeature_ = wafFeature_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dynamic-dispatch merge: routes to the typed overload when possible.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.recaptchaenterprise.v1.WafSettings) {
        return mergeFrom((com.google.recaptchaenterprise.v1.WafSettings) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge: proto3 enum fields are only copied when set to a non-default (non-zero) value.
    public Builder mergeFrom(com.google.recaptchaenterprise.v1.WafSettings other) {
      if (other == com.google.recaptchaenterprise.v1.WafSettings.getDefaultInstance()) return this;
      if (other.wafService_ != 0) {
        setWafServiceValue(other.getWafServiceValue());
      }
      if (other.wafFeature_ != 0) {
        setWafFeatureValue(other.getWafFeatureValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: consumes tag/value pairs until end of stream or an end-group tag.
    // Tags 8 and 16 are fields 1 (waf_service) and 2 (waf_feature), both varint-encoded enums.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                wafService_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                wafFeature_ = input.readEnum();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits: 0x1 tracks wafService_, 0x2 tracks wafFeature_.
    private int bitField0_;
    private int wafService_ = 0;
    /**
     *
     *
     * <pre>
     * Required. The WAF service that uses this key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafService waf_service = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The enum numeric value on the wire for wafService.
     */
    @java.lang.Override
    public int getWafServiceValue() {
      return wafService_;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF service that uses this key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafService waf_service = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The enum numeric value on the wire for wafService to set.
     * @return This builder for chaining.
     */
    public Builder setWafServiceValue(int value) {
      wafService_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF service that uses this key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafService waf_service = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The wafService.
     */
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.WafSettings.WafService getWafService() {
      com.google.recaptchaenterprise.v1.WafSettings.WafService result =
          com.google.recaptchaenterprise.v1.WafSettings.WafService.forNumber(wafService_);
      return result == null
          ? com.google.recaptchaenterprise.v1.WafSettings.WafService.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF service that uses this key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafService waf_service = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The wafService to set.
     * @return This builder for chaining.
     */
    public Builder setWafService(com.google.recaptchaenterprise.v1.WafSettings.WafService value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      wafService_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF service that uses this key.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafService waf_service = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearWafService() {
      bitField0_ = (bitField0_ & ~0x00000001);
      wafService_ = 0;
      onChanged();
      return this;
    }
    private int wafFeature_ = 0;
    /**
     *
     *
     * <pre>
     * Required. The WAF feature for which this key is enabled.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature waf_feature = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The enum numeric value on the wire for wafFeature.
     */
    @java.lang.Override
    public int getWafFeatureValue() {
      return wafFeature_;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF feature for which this key is enabled.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature waf_feature = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The enum numeric value on the wire for wafFeature to set.
     * @return This builder for chaining.
     */
    public Builder setWafFeatureValue(int value) {
      wafFeature_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF feature for which this key is enabled.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature waf_feature = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The wafFeature.
     */
    @java.lang.Override
    public com.google.recaptchaenterprise.v1.WafSettings.WafFeature getWafFeature() {
      com.google.recaptchaenterprise.v1.WafSettings.WafFeature result =
          com.google.recaptchaenterprise.v1.WafSettings.WafFeature.forNumber(wafFeature_);
      return result == null
          ? com.google.recaptchaenterprise.v1.WafSettings.WafFeature.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF feature for which this key is enabled.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature waf_feature = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @param value The wafFeature to set.
     * @return This builder for chaining.
     */
    public Builder setWafFeature(com.google.recaptchaenterprise.v1.WafSettings.WafFeature value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000002;
      wafFeature_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The WAF feature for which this key is enabled.
     * </pre>
     *
     * <code>
     * .google.cloud.recaptchaenterprise.v1.WafSettings.WafFeature waf_feature = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearWafFeature() {
      bitField0_ = (bitField0_ & ~0x00000002);
      wafFeature_ = 0;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.recaptchaenterprise.v1.WafSettings)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.recaptchaenterprise.v1.WafSettings)
  // Singleton default instance: all fields at their proto3 default (zero) values.
  private static final com.google.recaptchaenterprise.v1.WafSettings DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.recaptchaenterprise.v1.WafSettings();
  }
  // Returns the shared immutable default instance.
  public static com.google.recaptchaenterprise.v1.WafSettings getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser; on failure it attaches the partially-parsed message to the thrown exception
  // so callers can inspect whatever fields were successfully read.
  private static final com.google.protobuf.Parser<WafSettings> PARSER =
      new com.google.protobuf.AbstractParser<WafSettings>() {
        @java.lang.Override
        public WafSettings parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<WafSettings> parser() {
    return PARSER;
  }
  // Instance hook required by the Message interface; returns the shared parser.
  @java.lang.Override
  public com.google.protobuf.Parser<WafSettings> getParserForType() {
    return PARSER;
  }
  // Instance hook required by the Message interface; returns the shared default instance.
  @java.lang.Override
  public com.google.recaptchaenterprise.v1.WafSettings getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/harmony | 35,208 | classlib/modules/nio_char/src/main/java/org/apache/harmony/niochar/CharsetProviderImpl.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.niochar;
import java.lang.reflect.Constructor;
import java.nio.charset.Charset;
import java.nio.charset.IllegalCharsetNameException;
import java.nio.charset.spi.CharsetProvider;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
 * This class is an implementation of the java.nio.charset.spi.CharsetProvider
 * class, in spite of the fact that it is abstract. It is a base class of a
 * concrete character set provider implementation. Please note, a derived class
 * should define the getPackageName() and getCharsetsInfo() methods. The first
 * of them has to return a string with a package name where the derived class is
 * located. The second one has to construct an array, the structure of which is
 * described below. See CharsetProviderImplStd or CharsetProviderImplExt for
 * example.
 */
public class CharsetProviderImpl extends CharsetProvider {

    /**
     * Flags whether the default providers have got the native implementation
     * loaded. These are optional and used to improve performance in some
     * circumstances.
     */
    private static boolean HAS_LOADED_NATIVES = false;

    static {
        try {
            System.loadLibrary("hyniochar"); //$NON-NLS-1$
            HAS_LOADED_NATIVES = true;
        } catch (UnsatisfiedLinkError e) {
            // Ignore - leave as natives unavailable.
        }
    }

    /**
     * The named index of the 0th element of the <code>charsets[]</code> array.
     * It means a charset class name.
     */
    protected static final int CHARSET_CLASS = 0;

    /**
     * The named index of the 1st element of the <code>charsets[]</code> array.
     * It means a charset instance.
     */
    protected static final int CHARSET_INSTANCE = 1;

    /**
     * The named index of the 2nd element of the <code>charsets[]</code> array.
     * It means a charset aliases array.
     */
    protected static final int CHARSET_ALIASES = 2;

    /**
     * Answers whether the provider has loaded the native implementation of the
     * encoders/decoders.
     *
     * @return true if the natives are loaded.
     */
    public static boolean hasLoadedNatives() {
        return HAS_LOADED_NATIVES;
    }

    /**
     * A cache of the charset instances, keyed by upper-cased alias.
     */
    protected Map<String, Object[]> cache;

    /**
     * An array returned by <code>getCharsetsInfo()</code>.
     */
    protected Object charsets[][];

    /**
     * A package name returned by <code>getPackageName()</code>.
     */
    protected String packageName;

    /*
     * Utility to convert valid charset names to upper case. Only characters
     * that are legal in charset names are accepted; anything else triggers
     * an IllegalCharsetNameException, as required by the Charset spec.
     */
    private static String toUpperCase(String name) {
        int length = name.length();
        char[] output = new char[length];
        for (int i = 0; i < length; i++) {
            char ch = name.charAt(i);
            if (passthru(ch)) {
                output[i] = ch;
            } else {
                if ((ch >= '\u0061') && (ch <= '\u007A')) {
                    // Lowercase 'a' to 'z'
                    output[i] = (char) (ch - ('a' - 'A'));
                } else {
                    throw new IllegalCharsetNameException(name);
                }
            }
        }
        return new String(output);
    }

    /*
     * Answers true if the character is already considered uppercase, false otherwise.
     */
    private static boolean passthru(char c) {
        return ((c >= '\u0041') && (c <= '\u005A')) || // Uppercase letters 'A' to 'Z'
                ((c >= '\u0030') && (c <= '\u0039')) || // Digits '0' to '9'
                (c == '\u002D') || // Dash '-'
                (c == '\u002E') || // Period '.'
                (c == '\u003A') || // Colon ':'
                (c == '\u005F'); // Underscore '_'
    }

    /**
     * Default constructor for the built-in charset provider implementation.
     * Registers every alias (upper-cased) of every known charset in the cache.
     */
    public CharsetProviderImpl() {
        cache = Collections.synchronizedMap(new HashMap<String, Object[]>());
        charsets = getCharsetsInfo();
        packageName = getPackageName();
        for (int i = 0; i < charsets.length; i++) {
            String aliases[] = (String[]) charsets[i][CHARSET_ALIASES];
            for (int a = 0; a < aliases.length; a++) {
                cache.put(toUpperCase(aliases[a]), charsets[i]);
            }
        }
    }

    /**
     * Answers an iterator over the list of available charsets.
     *
     * @return available charsets.
     */
    @Override
    public Iterator<Charset> charsets() {
        ArrayList<Charset> list = new ArrayList<Charset>();
        for (int i = 0; i < charsets.length; i++) {
            // The 0th alias is, by convention, the canonical charset name.
            list
                    .add(charsetForName(((String[]) charsets[i][CHARSET_ALIASES])[0]));
        }
        return list.iterator();
    }

    /**
     * Answers the charset with the given canonical or alias name.
     *
     * Subsequent requests for the same charset will answer the same instance.
     * If the charset is unavailable the method returns <code>null</code>.
     *
     * @param charsetName
     *            the name of a character set.
     * @return the charset requested, or <code>null</code> if unavailable.
     * @throws IllegalCharsetNameException
     *             if the name contains a character that is illegal in a
     *             charset name.
     */
    @Override
    public Charset charsetForName(String charsetName) {
        Object arr[] = cache.get(toUpperCase(charsetName));
        if (arr == null) {
            return null;
        }
        // Make an instance of the found charset lazily, on first request.
        if (arr[CHARSET_INSTANCE] == null) {
            final String className = packageName
                    + "." + (String) arr[CHARSET_CLASS]; //$NON-NLS-1$
            final String canonicalName = ((String[]) arr[CHARSET_ALIASES])[0];
            final String aliases[] = (String[]) arr[CHARSET_ALIASES];
            // Reflection runs in a privileged block so untrusted callers can
            // still obtain charsets; instantiation failure yields null.
            arr[CHARSET_INSTANCE] = AccessController
                    .doPrivileged(new PrivilegedAction<Object>() {
                        public Object run() {
                            try {
                                Class<?> cls = Class.forName(className);
                                Constructor<?> ctor = cls
                                        .getConstructor(new Class[] {
                                                String.class, String[].class });
                                ctor.setAccessible(true);
                                return ctor.newInstance(new Object[] {
                                        canonicalName, aliases });
                            } catch (Exception e) {
                                return null;
                            }
                        }
                    });
        }
        return (Charset) arr[CHARSET_INSTANCE];
    }

    /**
     * A helper method for answering all the available charsets in this
     * provider.
     *
     * The method adds to the given map by storing charset canonical names as
     * the keys, with associated charsets as the value.
     *
     * @param map
     *            for storing the descriptions of the charsets.
     */
    public final void putCharsets(Map<String, Charset> map) {
        Object[][] charsetInfo = getCharsetsInfo();
        for (int i = 0; i < charsetInfo.length; i++) {
            final String canonicalName = ((String[]) charsetInfo[i][CHARSET_ALIASES])[0];
            Charset cs = charsetForName(canonicalName);
            if (cs != null) {
                map.put(canonicalName, cs);
            }
        }
    }

    /**
     * Answers the package in which the charset implementation classes named by
     * {@link #getCharsetsInfo()} live. Subclasses override this together with
     * <code>getCharsetsInfo()</code>.
     *
     * @return the implementation package name.
     */
    protected String getPackageName() {
        return "org.apache.harmony.niochar.charset"; //$NON-NLS-1$
    }

    /**
     * Answers the table describing every charset this provider knows about.
     *
     * @return the charset description table; see the comment below for its
     *         structure.
     */
    protected Object[][] getCharsetsInfo() {
        /* The next charset aliases corresponds IANA registry
         * http://www.iana.org/assignments/character-sets.
         *
         *
         * Array structure:
         *
         * charsetsInfo[][0] - String: A charset class name.
         *                     The named index is CHARSET_CLASS.
         * charsetsInfo[][1] - Charset: A charset instance.
         *                     The named index is CHARSET_INSTANCE.
         * charsetsInfo[][2] - String[]: A charset aliases array.
         *                     The named index is CHARSET_ALIASES.
         *                     THE FIRST ELEMENT OF THE ALIASES ARRAY MUST BE
         *                     A CANONICAL CHARSET NAME.
         */
        @SuppressWarnings("nls")
        Object charsetsInfo[][] = {
            { "US_ASCII", null, new String[] { "US-ASCII", "ANSI_X3.4-1968",
                    "ANSI_X3.4-1986", "iso-ir-6", "iso_646.irv:1983",
                    "ISO_646.irv:1991", "ASCII", "ISO646-US", "us", "cp367",
                    "ascii7", "646", "csASCII" } },
            { "KOI8_R", null, new String[] { "KOI8-R", "csKOI8R" } },
            { "CP_1250", null, new String[] { "windows-1250", "cp1250" } },
            { "CP_1251", null, new String[] { "windows-1251", "cp1251" } },
            { "CP_1252", null, new String[] { "windows-1252", "cp1252" } },
            { "CP_1253", null, new String[] { "windows-1253", "cp1253" } },
            { "CP_1254", null, new String[] { "windows-1254", "cp1254" } },
            { "CP_1257", null, new String[] { "windows-1257", "cp1257" } },
            { "ISO_8859_1", null, new String[] { "ISO-8859-1",
                    "8859_1", /*not in IANA Registry*/
                    "ISO8859-1", /*not in IANA Registry*/
                    "ISO8859_1", /*not in IANA Registry*/
                    "ISO_8859-1:1987", "iso-ir-100", "ISO_8859-1", "latin1",
                    "l1", "IBM819", "ISO_8859_1", "IBM-819", "CP819", "819",
                    "csISOLatin1" } },
            { "ISO_8859_2", null, new String[] { "ISO-8859-2",
                    "8859_2", /*not in IANA Registry*/
                    "ISO_8859-2:1987", "iso-ir-101", "ISO_8859-2", "latin2",
                    "l2", "csISOLatin2" } },
            { "ISO_8859_4", null, new String[] { "ISO-8859-4",
                    "8859_4", /*not in IANA Registry*/
                    "ISO_8859-4:1988", "iso-ir-110", "ISO_8859-4", "latin4",
                    "l4", "csISOLatin4" } },
            { "ISO_8859_5", null, new String[] { "ISO-8859-5",
                    "8859_5", /*not in IANA Registry*/
                    "ISO_8859-5:1988", "iso-ir-144", "ISO_8859-5", "cyrillic",
                    "csISOLatinCyrillic" } },
            { "ISO_8859_7", null, new String[] { "ISO-8859-7",
                    "ISO_8859-7:1987", "ISO_8859-7", "iso-ir-126", "ELOT_928",
                    "ECMA-118", "greek", "greek8", "csISOLatinGreek" } },
            { "ISO_8859_9", null, new String[] { "ISO-8859-9",
                    "ISO_8859-9:1989", "iso-ir-148", "ISO_8859-9", "latin5",
                    "l5", "csISOLatin5" } },
            { "ISO_8859_13", null, new String[] { "ISO-8859-13" } },
            { "ISO_8859_15", null, new String[] { "ISO-8859-15",
                    "ISO_8859-15", "Latin-9" } },
            { "UTF_8", null, new String[] { "UTF-8",
                    "UTF8" /*not in IANA Registry*/ } },
            { "UTF_16", null, new String[] { "UTF-16", "UTF16", "UTF_16" } },
            { "UTF_16LE", null, new String[] { "UTF-16LE", "X-UTF-16LE",
                    "UTF_16LE" } },
            { "UTF_16BE", null, new String[] { "UTF-16BE", "X-UTF-16BE",
                    "UTF_16BE" } },
            { "IBM866", null, new String[] { "IBM866", "cp866", "866",
                    "csIBM866" } },
            // additional charsets
            { "additional.windows_1255", null, new String[] { "windows-1255",
                    "cp1255" } },
            { "additional.windows_1256", null, new String[] { "windows-1256",
                    "cp1256" } },
            { "additional.IBM1026", null, new String[] { "IBM1026", "CP1026",
                    "csIBM1026" } },
            { "additional.IBM1047", null, new String[] { "IBM1047", "1047",
                    "cp1047", "ibm-1047" } },
            { "additional.IBM037", null, new String[] { "IBM037", "cp037",
                    "ebcdic-cp-us", "ebcdic-cp-ca", "ebcdic-cp-wt",
                    "ebcdic-cp-nl", "csIBM037" } },
            { "additional.IBM424", null, new String[] { "IBM424", "cp424",
                    "ebcdic-cp-he", "csIBM424" } },
            { "additional.IBM437", null, new String[] { "IBM437", "cp437",
                    "437", "csPC8CodePage437" } },
            { "additional.IBM500", null, new String[] { "IBM500", "CP500",
                    "ebcdic-cp-be", "ebcdic-cp-ch", "csIBM500" } },
            { "additional.IBM775", null, new String[] { "IBM775", "cp775",
                    "csPC775Baltic" } },
            { "additional.IBM850", null, new String[] { "IBM850", "cp850",
                    "850", "csPC850Multilingual" } },
            { "additional.IBM852", null, new String[] { "IBM852", "cp852",
                    "852", "csPCp852" } },
            { "additional.IBM855", null, new String[] { "IBM855", "cp855",
                    "855", "csIBM855" } },
            { "additional.IBM857", null, new String[] { "IBM857", "cp857",
                    "857", "csIBM857" } },
            { "additional.IBM860", null, new String[] { "IBM860", "cp860",
                    "860", "csIBM860" } },
            { "additional.IBM861", null, new String[] { "IBM861", "cp861",
                    "861", "cp-is", "csIBM861" } },
            { "additional.IBM862", null, new String[] { "IBM862", "cp862",
                    "862", "csPC862LatinHebrew" } },
            { "additional.IBM863", null, new String[] { "IBM863", "cp863",
                    "863", "csIBM863" } },
            { "additional.IBM865", null, new String[] { "IBM865", "cp865",
                    "865", "csIBM865" } },
            { "additional.IBM869", null, new String[] { "IBM869", "cp869",
                    "869", "cp-gr", "csIBM869" } },
            { "additional.IBM00858", null, new String[] { "IBM00858", "cp858",
                    "CCSID00858", "CP00858" } },
            { "additional.IBM01140", null, new String[] { "IBM01140",
                    "cp1140", "CCSID01140", "CP01140" } },
            { "additional.IBM01141", null, new String[] { "IBM01141",
                    "cp1141", "CCSID01141", "CP01141" } },
            { "additional.IBM01142", null, new String[] { "IBM01142",
                    "cp1142", "CCSID01142", "CP01142" } },
            { "additional.IBM01143", null, new String[] { "IBM01143",
                    "cp1143", "CCSID01143", "CP01143" } },
            { "additional.IBM01144", null, new String[] { "IBM01144",
                    "cp1144", "CCSID01144", "CP01144" } },
            { "additional.IBM01145", null, new String[] { "IBM01145",
                    "cp1145", "CCSID01145", "CP01145" } },
            { "additional.IBM01146", null, new String[] { "IBM01146",
                    "cp1146", "CCSID01146", "CP01146" } },
            { "additional.IBM01147", null, new String[] { "IBM01147",
                    "cp1147", "CCSID01147", "CP01147" } },
            { "additional.IBM01148", null, new String[] { "IBM01148",
                    "cp1148", "CCSID01148", "CP01148" } },
            { "additional.IBM01149", null, new String[] { "IBM01149",
                    "cp1149", "CCSID01149", "CP01149" } },
            { "additional.IBM273", null, new String[] { "IBM273", "cp273",
                    "csIBM273" } },
            { "additional.IBM277", null, new String[] { "IBM277", "cp277",
                    "EBCDIC-CP-DK", "EBCDIC-CP-NO", "csIBM277" } },
            { "additional.IBM278", null, new String[] { "IBM278", "cp278",
                    "ebcdic-cp-fi", "ebcdic-cp-se", "csIBM278" } },
            { "additional.IBM280", null, new String[] { "IBM280", "cp280",
                    "ebcdic-cp-it", "csIBM280" } },
            { "additional.IBM284", null, new String[] { "IBM284", "cp284",
                    "ebcdic-cp-es", "csIBM284" } },
            { "additional.IBM285", null, new String[] { "IBM285", "cp285",
                    "ebcdic-cp-gb", "csIBM285" } },
            { "additional.IBM297", null, new String[] { "IBM297", "cp297",
                    "ebcdic-cp-fr", "csIBM297" } },
            { "additional.IBM870", null, new String[] { "IBM870", "cp870",
                    "ebcdic-cp-roece", "ebcdic-cp-yu", "csIBM870" } },
            { "additional.IBM871", null, new String[] { "IBM871", "cp871",
                    "ebcdic-cp-is", "csIBM871" } },
            { "additional.IBM918", null, new String[] { "IBM918", "cp918",
                    "ebcdic-cp-ar2", "csIBM918" } },
            { "additional.IBM420", null, new String[] { "IBM420", "cp420",
                    "ebcdic-cp-ar1", "csIBM420" } },
            { "additional.IBM864", null, new String[] { "IBM864", "cp864",
                    "csIBM864" } },
            { "additional.IBM868", null, new String[] { "IBM868", "cp868",
                    "cp-ar", "csIBM868" } },
            { "additional.ISO_8859_3", null, new String[] { "ISO-8859-3",
                    "8859_3", /*not in IANA Registry*/
                    "ISO_8859_3",
                    // Fixed typo: was "ISO_8859_2:1998"; the IANA-registered
                    // alias for ISO-8859-3 is "ISO_8859-3:1988".
                    "ISO_8859-3:1988",
                    "iso-ir-109", "ISO_8859-3", "latin3", "l3",
                    "csISOLatin3" } },
            { "additional.ISO_8859_6", null, new String[] { "ISO-8859-6",
                    "ISO_8859_6", "ISO_8859-6:1987", "iso-ir-127",
                    "ISO_8859-6", "ECMA-114", "ASMO-708", "arabic",
                    "csISOLatinArabic" } },
            { "additional.ISO_8859_8", null, new String[] { "ISO-8859-8",
                    "ISO_8859_8", "ISO_8859-8:1988", "iso-ir-138",
                    "ISO_8859-8", "hebrew", "csISOLatinHebrew" } },
            { "additional.IBM_Thai", null, new String[] { "IBM-Thai",
                    "cp838" } },
            { "additional.x_IBM737", null, new String[] { "x-IBM737",
                    "x-ibm-737_P100-1997", "cp737" } },
            { "additional.x_IBM856", null, new String[] { "x-IBM856",
                    "cp856" } },
            { "additional.x_IBM874", null, new String[] { "TIS-620",
                    "x-IBM874", "cp874" } },
            { "additional.x_IBM875", null, new String[] { "x-IBM875",
                    "x-ibm-875_P100-1995", "cp875" } },
            { "additional.x_IBM922", null, new String[] { "x-IBM922",
                    "cp922" } },
            { "additional.x_IBM1006", null, new String[] { "x-IBM1006",
                    "x-ibm-1006_P100-1995", "cp1006" } },
            { "additional.x_IBM1025", null, new String[] { "x-IBM1025",
                    "x-ibm-1025_P100-1995", "cp1025" } },
            { "additional.x_IBM1112", null, new String[] { "x-IBM1112",
                    "x-ibm-1112_P100-1995", "cp1112" } },
            { "additional.x_IBM1122", null, new String[] { "x-IBM1122",
                    "x-ibm-1122_P100-1999", "cp1122" } },
            { "additional.x_IBM1123", null, new String[] { "x-IBM1123",
                    "x-ibm-1123_P100-1995", "cp1123" } },
            { "additional.x_IBM1124", null, new String[] { "x-IBM1124",
                    "x-ibm-1124_P100-1996", "cp1124" } },
            { "additional.x_IBM1097", null, new String[] { "x-IBM1097",
                    "x-ibm-1097_P100-1995", "cp1097" } },
            { "additional.x_IBM1098", null, new String[] { "x-IBM1098",
                    "x-ibm-1098_P100-1995", "cp1098" } },
            { "additional.x_MacCyrillic", null, new String[] {
                    "x-MacCyrillic", "x-mac-cyrillic", "MacCyrillic" } },
            { "additional.x_MacGreek", null, new String[] { "x-MacGreek",
                    "x-mac-greek", "MacGreek" } },
            { "additional.x_MacTurkish", null, new String[] { "x-MacTurkish",
                    "x-mac-turkish", "MacTurkish" } },
            { "additional.windows_31j", null, new String[] { "Shift_JIS",
                    "windows-31j", "MS932", "windows-932", "cp932",
                    "csWindows31J", "cp943c", "x-ms-cp932", "ibm-943" } },
            { "additional.Big5", null, new String[] { "Big5", "csBig5",
                    "windows-950" } },
            { "additional.Big5_HKSCS", null, new String[] { "Big5-HKSCS",
                    "ibm-1375" } },
            { "additional.EUC_KR", null, new String[] { "EUC-KR",
                    "windows-51949", "ibm-970" } },
            { "additional.GBK", null, new String[] { "GBK" } },
            { "additional.x_MS950_HKSCS", null, new String[] {
                    "x-ibm-1375_P100-2003", "x-MS950-HKSCS",
                    "MS950_HKSCS" } },
            { "additional.x_windows_949", null, new String[] {
                    "x-windows-949", "MS949" } },
            { "additional.GB18030", null, new String[] { "GB18030",
                    "windows-54936", "ibm-1392" } },
            { "additional.GB2312", null, new String[] { "GB2312", "cp1383",
                    "EUC_CN" } }
        };
        return charsetsInfo;
    }
}
|
apache/sentry | 35,201 | sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hive/TestOperationsPart1.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sentry.tests.e2e.hive;
import java.io.File;
import java.io.FileOutputStream;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
import org.apache.sentry.provider.file.PolicyFile;
import org.junit.Before;
import org.junit.Test;
import com.google.common.io.Resources;
/**
 * End-to-end authorization tests for Hive DDL/DML operations on databases and
 * tables (part 1). Each test writes a policy file granting a minimal privilege
 * set, then verifies that the operation succeeds for an authorized user and
 * fails with a SemanticException for an unauthorized one.
 *
 * <p>NOTE(review): the previous class comment listed index operations
 * (HiveOperation.CREATEINDEX, DROPINDEX, ALTERINDEX_REBUILD, ALTERINDEX_PROPS)
 * which are not exercised anywhere in this class — presumably covered by a
 * sibling "part" class; confirm before relying on that description.
 */
public class TestOperationsPart1 extends AbstractTestWithStaticConfiguration {
  private PolicyFile policyFile;
  // Default table name used by most tests.
  final String tableName = "tb1";

  // Short role alias -> Sentry privilege string, shared by all tests.
  static Map<String, String> privileges = new HashMap<String, String>();
  static {
    privileges.put("all_server", "server=server1->action=all");
    privileges.put("create_server", "server=server1->action=create");
    privileges.put("all_db1", "server=server1->db=" + DB1 + "->action=all");
    privileges.put("select_db1", "server=server1->db=" + DB1 + "->action=select");
    privileges.put("select_default", "server=server1->db=" + DEFAULT + "->action=select");
    privileges.put("insert_db1", "server=server1->db=" + DB1 + "->action=insert");
    privileges.put("create_db1", "server=server1->db=" + DB1 + "->action=create");
    privileges.put("create_default", "server=server1->db=" + DEFAULT + "->action=create");
    privileges.put("drop_db1", "server=server1->db=" + DB1 + "->action=drop");
    privileges.put("drop_default", "server=server1->db=" + DEFAULT + "->action=drop");
    privileges.put("alter_db1", "server=server1->db=" + DB1 + "->action=alter");
    privileges.put("create_db2", "server=server1->db=" + DB2 + "->action=create");
    privileges.put("all_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=all");
    // NOTE(review): despite the table-style alias, this grant is database
    // scoped — presumably because CREATE is checked at the database level.
    privileges.put("create_db1_tb1", "server=server1->db=" + DB1 + "->action=create");
    privileges.put("select_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=select");
    privileges.put("insert_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=insert");
    privileges.put("alter_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=alter");
    privileges.put("alter_db1_ptab", "server=server1->db=" + DB1 + "->table=ptab->action=alter");
    privileges.put("index_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=index");
    privileges.put("lock_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=lock");
    // "drop_db1_tb1" was originally put twice (first with a database-level
    // value, then with this table-level value); only the second put survived,
    // so the dead first put was removed. The effective map is unchanged.
    privileges.put("drop_db1_tb1", "server=server1->db=" + DB1 + "->table=tb1->action=drop");
    privileges.put("insert_db2_tb2", "server=server1->db=" + DB2 + "->table=tb2->action=insert");
    privileges.put("select_db1_view1", "server=server1->db=" + DB1 + "->table=view1->action=select");
    privileges.put("create_db1_view1", "server=server1->db=" + DB1 + "->action=create");
    // NOTE(review): "all_db1_view1" and "drop_db1_view1" are database scoped
    // despite their view-style aliases — confirm this is intentional.
    privileges.put("all_db1_view1", "server=server1->db=" + DB1 + "->action=all");
    privileges.put("drop_db1_view1", "server=server1->db=" + DB1 + "->action=drop");
    privileges.put("select_db1_tb2", "server=server1->db=" + DB1 + "->table=tb2->action=select");
    privileges.put("alter_db1_view1", "server=server1->db=" + DB1 + "->table=view1->action=alter");
  }

  /** Resets the policy file to an admin-only baseline before each test. */
  @Before
  public void setup() throws Exception{
    policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP)
        .setUserGroupMapping(StaticUserGroup.getStaticMapping());
    writePolicyFile(policyFile);
  }

  /** Recreates {@code db} (dropping any existing one) and optionally a table in it. */
  private void adminCreate(String db, String table) throws Exception{
    adminCreate(db, table, false);
  }

  /**
   * Recreates {@code db} as ADMIN1 and, when {@code table} is non-null,
   * creates it with a single string column; {@code partitioned} adds a
   * string partition column {@code b}.
   */
  private void adminCreate(String db, String table, boolean partitioned) throws Exception{
    Connection connection = context.createConnection(ADMIN1);
    Statement statement = context.createStatement(connection);
    statement.execute("DROP DATABASE IF EXISTS " + db + " CASCADE");
    statement.execute("CREATE DATABASE " + db);
    if(table !=null) {
      if (partitioned) {
        statement.execute("CREATE table " + db + "." + table + " (a string) PARTITIONED BY (b string)");
      } else{
        statement.execute("CREATE table " + db + "." + table + " (a string)");
      }
    }
    statement.close();
    connection.close();
  }

  /** Adds partition (b='1') to DB1.tb1 as ADMIN1. */
  private void adminCreatePartition() throws Exception{
    Connection connection = context.createConnection(ADMIN1);
    Statement statement = context.createStatement(connection);
    statement.execute("USE " + DB1);
    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
    statement.close();
    connection.close();
  }

  /* Test all operations that require create on Server
   1. Create database : HiveOperation.CREATEDATABASE
   */
  @Test
  public void testCreateOnServer() throws Exception{
    policyFile
        .addPermissionsToRole("create_server", privileges.get("create_server"))
        .addRolesToGroup(USERGROUP1, "create_server");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("Create database " + DB2);
    statement.close();
    connection.close();

    // Negative case: db-level create must not allow server-level CREATE DATABASE.
    policyFile
        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
        .addRolesToGroup(USERGROUP2, "create_db1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, "CREATE database " + DB1, semanticException);
    statement.close();
    connection.close();
  }

  /** CREATE TEMPORARY MACRO requires create on the current (default) database. */
  @Test
  public void testCreateMacro() throws Exception {
    policyFile
        .addPermissionsToRole("create_default", privileges.get("create_default"))
        .addRolesToGroup(USERGROUP1, "create_default");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))");
    statement.close();
    connection.close();

    // Negative case: select on default is not sufficient.
    policyFile
        .addPermissionsToRole("select_default", privileges.get("select_default"))
        .addRolesToGroup(USERGROUP2, "select_default");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement,
        "CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))", semanticException);
    statement.close();
    connection.close();
  }

  /** DROP TEMPORARY MACRO requires drop on the current (default) database. */
  @Test
  public void testDropMacro() throws Exception {
    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("drop_default", privileges.get("drop_default"))
        .addRolesToGroup(USERGROUP1, "drop_default");
    writePolicyFile(policyFile);

    Connection connection;
    Statement statement;

    // NOTE(review): the admin connections below are intentionally left open
    // for the remainder of the test — TEMPORARY macros are session scoped, so
    // closing the creating session could remove the macro before the user's
    // DROP runs. Confirm against the test harness before "fixing" this.
    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    statement.execute("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))");

    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    statement.execute("DROP TEMPORARY MACRO SIGMOID");
    statement.close();
    connection.close();

    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    statement.execute("CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))");

    // Negative case: select on default is not sufficient.
    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("select_default", privileges.get("select_default"))
        .addRolesToGroup(USERGROUP2, "select_default");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, " DROP TEMPORARY MACRO SIGMOID", semanticException);
    statement.close();
    connection.close();
  }

  /** LOAD + INSERT INTO a partitioned parquet table with all on db and uri privilege. */
  @Test
  public void testInsertInto() throws Exception{
    File dataFile;
    dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
    FileOutputStream to = new FileOutputStream(dataFile);
    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
    to.close();

    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
        .addPermissionsToRole("all_uri", "server=server1->uri=file://" + dataDir)
        .addRolesToGroup(USERGROUP1, "all_db1", "all_uri");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("create table bar (key int)");
    statement.execute("load data local inpath '" + dataFile.getPath() + "' into table bar");
    statement.execute("create table foo (key int) partitioned by (part int) stored as parquet");
    statement.execute("insert into table foo PARTITION(part=1) select key from bar");
    statement.close();
    connection.close();
  }

  /* Test all operations that require create on Database alone
   1. Create table : HiveOperation.CREATETABLE
   */
  @Test
  public void testCreateOnDatabase() throws Exception{
    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
        .addRolesToGroup(USERGROUP1, "create_db1")
        .addRolesToGroup(USERGROUP2, "all_db1");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("CREATE TABLE " + DB1 + ".tb2(a int)");
    statement.close();
    connection.close();

    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("CREATE TABLE " + DB1 + ".tb3(a int)");
    statement.close();
    connection.close();

    // Negative case. NOTE(review): the role alias says "all_db1_tb1" but the
    // granted privilege is deliberately only select_db1, so CREATE must fail.
    policyFile
        .addPermissionsToRole("all_db1_tb1", privileges.get("select_db1"))
        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER3_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, "CREATE TABLE " + DB1 + ".tb1(a int)", semanticException);
    statement.close();
    connection.close();
  }

  /* Test all operations that require drop on Database alone
   1. Drop database : HiveOperation.DROPDATABASE
   */
  @Test
  public void testDropOnDatabase() throws Exception{
    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("drop_db1", privileges.get("drop_db1"))
        .addRolesToGroup(USERGROUP1, "drop_db1");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("DROP DATABASE " + DB1);
    statement.close();
    connection.close();

    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
        .addRolesToGroup(USERGROUP2, "all_db1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("DROP DATABASE " + DB1);
    statement.close();
    connection.close();

    // Negative case: select on db is not sufficient.
    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
        .addRolesToGroup(USERGROUP3, "select_db1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER3_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, "drop database " + DB1, semanticException);
    statement.close();
    connection.close();
  }

  /* Test all operations that require alter on Database alone
   1. Alter database : HiveOperation.ALTERDATABASE
   2. Alter database : HiveOperation.ALTERDATABASE_OWNER
   */
  @Test
  public void testAlterOnDatabase() throws Exception{
    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("alter_db1", privileges.get("alter_db1"))
        .addPermissionsToRole("all_db1", privileges.get("all_db1"))
        .addRolesToGroup(USERGROUP2, "all_db1")
        .addRolesToGroup(USERGROUP1, "alter_db1");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
    // Fixed leak: the original reassigned connection/statement without closing.
    statement.close();
    connection.close();

    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')");
    statement.close();
    connection.close();

    // Negative case for admin: SET OWNER is rejected even for admins.
    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, "ALTER DATABASE " + DB1 + " SET OWNER USER " + USER1_1, semanticException);
    statement.close();
    connection.close();

    // Negative case: select on db is not sufficient.
    adminCreate(DB1, null);
    policyFile
        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
        .addRolesToGroup(USERGROUP3, "select_db1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER3_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, "ALTER DATABASE " + DB1 + " SET DBPROPERTIES ('comment'='comment')", semanticException);
    context.assertSentrySemanticException(statement, "ALTER DATABASE " + DB1 + " SET OWNER USER " + USER1_1, semanticException);
    statement.close();
    connection.close();
  }

  /* SELECT/INSERT on DATABASE
   1. HiveOperation.DESCDATABASE
   */
  @Test
  public void testDescDB() throws Exception {
    adminCreate(DB1, tableName);
    policyFile
        .addPermissionsToRole("select_db1", privileges.get("select_db1"))
        .addPermissionsToRole("insert_db1", privileges.get("insert_db1"))
        .addRolesToGroup(USERGROUP1, "select_db1")
        .addRolesToGroup(USERGROUP2, "insert_db1");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("describe database " + DB1);
    statement.close();
    connection.close();

    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("describe database " + DB1);
    statement.close();
    connection.close();

    // Negative case: table-level all does not imply db-level describe.
    policyFile
        .addPermissionsToRole("all_db1_tb1", privileges.get("all_db1_tb1"))
        .addRolesToGroup(USERGROUP3, "all_db1_tb1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER3_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, "describe database " + DB1, semanticException);
    statement.close();
    connection.close();
  }

  /** Shorthand for asserting a command raises a Sentry SemanticException. */
  private void assertSemanticException(Statement stmt, String command) throws SQLException{
    context.assertSentrySemanticException(stmt, command, semanticException);
  }

  /*
  1. Analyze table (HiveOperation.QUERY) : select + insert on table
   */
  @Test
  public void testSelectAndInsertOnTable() throws Exception {
    adminCreate(DB1, tableName, true);
    adminCreatePartition();
    policyFile
        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "select_db1_tb1", "insert_db1_tb1");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ANALYZE TABLE tb1 PARTITION (b='1' ) COMPUTE STATISTICS");
    statement.close();
    connection.close();
  }

  /* Operations which require select on table alone
   1. HiveOperation.QUERY
   2. HiveOperation.SHOW_TBLPROPERTIES
   3. HiveOperation.SHOW_CREATETABLE
   4. HiveOperation.SHOWINDEXES
   5. HiveOperation.SHOWCOLUMNS
   6. Describe tb1 : HiveOperation.DESCTABLE
   7. HiveOperation.SHOWPARTITIONS
   8. TODO: show functions?
   9. HiveOperation.SHOW_TABLESTATUS
   */
  @Test
  public void testSelectOnTable() throws Exception {
    adminCreate(DB1, tableName, true);
    adminCreatePartition();
    policyFile
        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "select_db1_tb1");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("select * from tb1");
    statement.executeQuery("SHOW Partitions tb1");
    statement.executeQuery("SHOW TBLPROPERTIES tb1");
    statement.executeQuery("SHOW CREATE TABLE tb1");
    statement.executeQuery("SHOW indexes on tb1");
    statement.executeQuery("SHOW COLUMNS from tb1");
    statement.executeQuery("SHOW functions '.*'");
    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
    statement.executeQuery("DESCRIBE tb1");
    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
    statement.close();
    connection.close();

    // Negative case: insert-only must not allow select or show-extended on DB2.
    adminCreate(DB2, tableName);
    policyFile
        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
        .addRolesToGroup(USERGROUP3, "insert_db1_tb1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER3_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    context.assertSentrySemanticException(statement, "select * from tb1", semanticException);
    context.assertSentrySemanticException(statement,
        "SHOW TABLE EXTENDED IN " + DB2 + " LIKE 'tb*'", semanticException);
    statement.close();
    connection.close();
  }

  /* Operations which require insert on table alone
   1. HiveOperation.SHOW_TBLPROPERTIES
   2. HiveOperation.SHOW_CREATETABLE
   3. HiveOperation.SHOWINDEXES
   4. HiveOperation.SHOWCOLUMNS
   5. HiveOperation.DESCTABLE
   6. HiveOperation.SHOWPARTITIONS
   7. TODO: show functions?
   8. TODO: lock, unlock, Show locks
   9. HiveOperation.SHOW_TABLESTATUS
   */
  @Test
  public void testInsertOnTable() throws Exception {
    adminCreate(DB1, tableName, true);
    adminCreatePartition();
    policyFile
        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "insert_db1_tb1");
    writePolicyFile(policyFile);

    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    /*statement.execute("LOCK TABLE tb1 EXCLUSIVE");
    statement.execute("UNLOCK TABLE tb1");
    */
    statement.executeQuery("SHOW TBLPROPERTIES tb1");
    statement.executeQuery("SHOW CREATE TABLE tb1");
    statement.executeQuery("SHOW indexes on tb1");
    statement.executeQuery("SHOW COLUMNS from tb1");
    statement.executeQuery("SHOW functions '.*'");
    //statement.executeQuery("SHOW LOCKS tb1");
    statement.executeQuery("SHOW TABLE EXTENDED IN " + DB1 + " LIKE 'tb*'");
    //NoViableAltException
    //statement.executeQuery("SHOW transactions");
    //statement.executeQuery("SHOW compactions");
    statement.executeQuery("DESCRIBE tb1");
    statement.executeQuery("DESCRIBE tb1 PARTITION (b=1)");
    statement.executeQuery("SHOW Partitions tb1");
    statement.close();
    connection.close();
  }

  /** ALTER TABLE ... INTO n BUCKETS requires alter on the table. */
  @Test
  public void testAlterTableBucket() throws Exception {
    adminCreate(DB1, tableName, true);
    Connection connection;
    Statement statement;

    // Setup: admin clusters the table and adds a partition.
    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
    statement.execute("ALTER TABLE tb1 ADD PARTITION (b = '1')");
    // Fixed leak: the original reassigned connection/statement without closing.
    statement.close();
    connection.close();

    policyFile.addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "alter_db1_tb1")
        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
    writePolicyFile(policyFile);

    //positive test cases
    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ALTER TABLE tb1 INTO 6 BUCKETS");
    statement.execute("ALTER TABLE tb1 PARTITION (b = '1') INTO 6 BUCKETS");
    statement.close();
    connection.close();

    //negative test cases
    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    context.assertSentrySemanticException(statement, "ALTER TABLE tb1 INTO 6 BUCKETS",
        semanticException);
    context.assertSentrySemanticException(statement, "ALTER TABLE tb1 PARTITION (b = '1') INTO 6 BUCKETS",
        semanticException);
    statement.close();
    connection.close();
  }

  /** ALTER TABLE ... PARTITION COLUMN requires alter on the table. */
  @Test
  public void testAlterTablePartColType() throws Exception {
    adminCreate(DB1, tableName, true);
    policyFile
        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "alter_db1_tb1")
        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
    writePolicyFile(policyFile);

    //positive test cases
    Connection connection = context.createConnection(USER1_1);
    Statement statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ALTER TABLE tb1 PARTITION COLUMN (b string)");
    statement.close();
    connection.close();

    //negative test cases
    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    context.assertSentrySemanticException(statement, "ALTER TABLE tb1 PARTITION COLUMN (b string)", semanticException);
    statement.close();
    connection.close();
  }

  /** Renaming a table within a database requires all on the source table plus create on the db. */
  @Test
  public void testAlterRenameTableWithinDB() throws Exception {
    adminCreate(DB1, "tb1", true);
    Connection connection;
    Statement statement;

    //Setup
    policyFile
        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
        .addPermissionsToRole("all_db1_tb1", privileges.get("all_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "create_db1", "all_db1_tb1")
        .addRolesToGroup(USERGROUP2, "create_db1");
    writePolicyFile(policyFile);
    String command = "ALTER TABLE " + DB1 + ".tb1 RENAME TO " + DB1 + ".tb2";

    //negative test cases
    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, command, semanticException);
    statement.close();
    connection.close();

    //positive test cases
    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    statement.execute(command);
    statement.close();
    connection.close();
  }

  /** Renaming a table across databases requires all on the source plus create on the target db. */
  @Test
  public void testAlterRenameTableCrossDB() throws Exception {
    adminCreate(DB1, tableName, true);
    adminCreate(DB2, null, true);
    Connection connection;
    Statement statement;

    //Setup
    policyFile
        .addPermissionsToRole("create_db2", privileges.get("create_db2"))
        .addPermissionsToRole("create_db1_tb1", privileges.get("create_db1_tb1"))
        .addPermissionsToRole("all_db1_tb1", privileges.get("all_db1_tb1"))
        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "create_db2", "all_db1_tb1")
        .addRolesToGroup(USERGROUP2, "create_db2", "select_db1_tb1");
    writePolicyFile(policyFile);
    String command = "ALTER TABLE " + DB1 + ".tb1 RENAME TO " + DB2 + ".tb2";

    //negative test cases
    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    context.assertSentrySemanticException(statement, command, semanticException);
    statement.close();
    connection.close();

    //positive test cases
    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    statement.execute(command);
    statement.close();
    connection.close();
  }

  /** ALTER VIEW ... RENAME requires all on the view plus create on the db. */
  @Test
  public void testAlterRenameView() throws Exception {
    adminCreate(DB1, tableName, true);
    Connection connection;
    Statement statement;

    //Setup
    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("CREATE VIEW view1 AS SELECT * FROM tb1");
    // Fixed leak: the original reassigned connection/statement without closing.
    statement.close();
    connection.close();

    policyFile
        .addPermissionsToRole("create_db1_view1", privileges.get("create_db1_view1"))
        .addPermissionsToRole("all_db1_view1", privileges.get("all_db1_view1"))
        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
        .addPermissionsToRole("select_db1_tb1", privileges.get("select_db1_tb1"))
        .addPermissionsToRole("select_db1_view1", privileges.get("select_db1_view1"))
        .addRolesToGroup(USERGROUP1, "create_db1", "all_db1_view1")
        .addRolesToGroup(USERGROUP2, "create_db1", "select_db1_tb1", "select_db1_view1");
    writePolicyFile(policyFile);

    //negative test cases
    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    context.assertSentrySemanticException(statement, "ALTER VIEW view1 RENAME TO view2",
        semanticException);
    statement.close();
    connection.close();

    //positive test cases
    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ALTER VIEW view1 RENAME TO view2");
    statement.close();
    connection.close();
  }

  /** ALTER VIEW ... AS requires alter on the view and select on the source table. */
  @Test
  public void testAlterViewAs() throws Exception {
    adminCreate(DB1, tableName, true);
    Connection connection;
    Statement statement;

    //Setup
    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("CREATE TABLE tb2 (foo int)");
    statement.execute("CREATE VIEW view1 AS SELECT * FROM tb1");
    // Fixed leak: the original reassigned connection/statement without closing.
    statement.close();
    connection.close();

    policyFile
        .addPermissionsToRole("select_db1_tb2", privileges.get("select_db1_tb2"))
        .addPermissionsToRole("alter_db1_view1", privileges.get("alter_db1_view1"))
        .addPermissionsToRole("drop_db1_view1", privileges.get("drop_db1_view1"))
        .addPermissionsToRole("create_db1", privileges.get("create_db1"))
        .addRolesToGroup(USERGROUP1, "select_db1_tb2", "alter_db1_view1")
        .addPermissionsToRole("select_db1_view1", privileges.get("select_db1_view1"))
        .addRolesToGroup(USERGROUP2, "create_db1", "select_db1_view1");
    writePolicyFile(policyFile);

    //positive test cases
    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ALTER VIEW view1 AS SELECT * FROM tb2");
    statement.close();
    connection.close();

    //negative test cases
    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    context.assertSentrySemanticException(statement, "ALTER VIEW view1 AS SELECT * FROM tb2",
        semanticException);
    statement.close();
    connection.close();
  }

  /* Test all operations that require alter on table
   1. HiveOperation.ALTERTABLE_PROPERTIES
   2. HiveOperation.ALTERTABLE_SERDEPROPERTIES
   3. HiveOperation.ALTERTABLE_CLUSTER_SORT
   4. HiveOperation.ALTERTABLE_TOUCH
   5. HiveOperation.ALTERTABLE_FILEFORMAT
   6. HiveOperation.ALTERTABLE_RENAMEPART
   7. HiveOperation.ALTERPARTITION_SERDEPROPERTIES
   8. TODO: archive partition
   9. TODO: unarchive partition
   10. HiveOperation.ALTERPARTITION_FILEFORMAT
   11. TODO: partition touch (is it same as HiveOperation.ALTERTABLE_TOUCH?)
   12. HiveOperation.ALTERTABLE_RENAMECOL
   13. HiveOperation.ALTERTABLE_ADDCOLS
   14. HiveOperation.ALTERTABLE_REPLACECOLS
   15. TODO: HiveOperation.ALTERVIEW_PROPERTIES
   16. TODO: HiveOperation.ALTERTABLE_SERIALIZER
   17. TODO: HiveOperation.ALTERPARTITION_SERIALIZER
   */
  @Test
  public void testAlterTable() throws Exception {
    adminCreate(DB1, tableName, true);
    Connection connection;
    Statement statement;

    //Setup
    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '10') ");
    statement.execute("ALTER TABLE tb1 ADD IF NOT EXISTS PARTITION (b = '1') ");
    statement.execute("DROP TABLE IF EXISTS ptab");
    statement.execute("CREATE TABLE ptab (a int) STORED AS PARQUET");
    // Fixed leak: the original reassigned connection/statement without closing.
    statement.close();
    connection.close();

    policyFile
        .addPermissionsToRole("alter_db1_tb1", privileges.get("alter_db1_tb1"))
        .addPermissionsToRole("alter_db1_ptab", privileges.get("alter_db1_ptab"))
        .addRolesToGroup(USERGROUP1, "alter_db1_tb1", "alter_db1_ptab")
        .addPermissionsToRole("insert_db1_tb1", privileges.get("insert_db1_tb1"))
        .addRolesToGroup(USERGROUP2, "insert_db1_tb1");
    writePolicyFile(policyFile);

    //Negative test cases: insert-only user must be rejected for every variant.
    connection = context.createConnection(USER2_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    assertSemanticException(statement, "ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
    assertSemanticException(statement, "ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
    assertSemanticException(statement, "ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH");
    assertSemanticException(statement, "ALTER TABLE tb1 SET FILEFORMAT RCFILE");
    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) RENAME TO PARTITION (b = 2)");
    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET SERDEPROPERTIES ('field.delim' = ',')");
    //assertSemanticException(statement, "ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
    //assertSemanticException(statement, "ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
    assertSemanticException(statement, "ALTER TABLE tb1 PARTITION (b = 10) SET FILEFORMAT RCFILE");
    assertSemanticException(statement, "ALTER TABLE tb1 TOUCH PARTITION (b = 10)");
    assertSemanticException(statement, "ALTER TABLE tb1 CHANGE COLUMN a c int");
    assertSemanticException(statement, "ALTER TABLE tb1 ADD COLUMNS (a int)");
    assertSemanticException(statement, "ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
    assertSemanticException(statement, "MSCK REPAIR TABLE tb1");
    //assertSemanticException(statement, "ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
    statement.close();
    connection.close();

    //Positive cases: alter privilege allows each variant.
    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    statement.execute("Use " + DB1);
    statement.execute("ALTER TABLE tb1 SET TBLPROPERTIES ('comment' = 'new_comment')");
    statement.execute("ALTER TABLE tb1 SET SERDEPROPERTIES ('field.delim' = ',')");
    statement.execute("ALTER TABLE tb1 CLUSTERED BY (a) SORTED BY (a) INTO 1 BUCKETS");
    statement.execute("ALTER TABLE tb1 TOUCH");
    statement.execute("ALTER TABLE tb1 SET FILEFORMAT RCFILE");
    statement.execute("ALTER TABLE tb1 PARTITION (b = 1) RENAME TO PARTITION (b = 2)");
    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET SERDEPROPERTIES ('field.delim' = ',')");
    //statement.execute("ALTER TABLE tb1 ARCHIVE PARTITION (b = 2)");
    //statement.execute("ALTER TABLE tb1 UNARCHIVE PARTITION (b = 2)");
    statement.execute("ALTER TABLE tb1 PARTITION (b = 2) SET FILEFORMAT RCFILE");
    statement.execute("ALTER TABLE tb1 TOUCH PARTITION (b = 2)");
    statement.execute("ALTER TABLE tb1 CHANGE COLUMN a c int");
    statement.execute("ALTER TABLE tb1 ADD COLUMNS (a int)");
    statement.execute("ALTER TABLE ptab REPLACE COLUMNS (a int, c int)");
    statement.execute("MSCK REPAIR TABLE tb1");
    //statement.execute("ALTER VIEW view1 SET TBLPROPERTIES ('comment' = 'new_comment')");
    statement.close();
    connection.close();
  }

  /** db.table prefixes are authorized against the prefix db, not the current db. */
  @Test
  public void testDbPrefix() throws Exception {
    Connection connection;
    Statement statement;

    connection = context.createConnection(ADMIN1);
    statement = context.createStatement(connection);
    //Create db1.table1
    statement.execute("create database " + DB1);
    statement.execute("create table " + DB1 + "." + tableName + "(a int)");
    //Create db2.table1
    statement.execute("create database " + DB2);
    statement.execute("create table " + DB2 + "." + tableName + "(a int)");
    // Fixed leak: the original reassigned connection/statement without closing.
    statement.close();
    connection.close();

    //grant on db1.table1
    policyFile
        .addPermissionsToRole("all_db1_tb1", privileges.get("all_db1_tb1"))
        .addRolesToGroup(USERGROUP1, "all_db1_tb1");
    writePolicyFile(policyFile);

    connection = context.createConnection(USER1_1);
    statement = context.createStatement(connection);
    //Use db1, then access both tables through explicit prefixes.
    statement.execute("use " + DB1);
    assertSemanticException(statement, "MSCK REPAIR TABLE " + DB2 + "." + tableName);
    statement.execute("MSCK REPAIR TABLE " + DB1 + "." + tableName);
    // Fixed leak: the original never closed the final connection/statement.
    statement.close();
    connection.close();
  }
}
|
googleapis/google-cloud-java | 35,754 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/QuestionAnsweringQualityInput.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Input for question answering quality metric.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.QuestionAnsweringQualityInput}
*/
public final class QuestionAnsweringQualityInput extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.QuestionAnsweringQualityInput)
    QuestionAnsweringQualityInputOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use QuestionAnsweringQualityInput.newBuilder() to construct.
  private QuestionAnsweringQualityInput(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private QuestionAnsweringQualityInput() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new QuestionAnsweringQualityInput();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringQualityInput_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.EvaluationServiceProto
        .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringQualityInput_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput.class,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput.Builder.class);
  }

  // Presence bits for the two optional message fields:
  // bit 0 (0x00000001) = metric_spec, bit 1 (0x00000002) = instance.
  private int bitField0_;
  public static final int METRIC_SPEC_FIELD_NUMBER = 1;
  private com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metricSpec_;
  /**
   *
   *
   * <pre>
   * Required. Spec for question answering quality score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the metricSpec field is set.
   */
  @java.lang.Override
  public boolean hasMetricSpec() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering quality score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The metricSpec.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec getMetricSpec() {
    // Never returns null: falls back to the default instance when unset.
    return metricSpec_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.getDefaultInstance()
        : metricSpec_;
  }

  /**
   *
   *
   * <pre>
   * Required. Spec for question answering quality score metric.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpecOrBuilder
      getMetricSpecOrBuilder() {
    return metricSpec_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.getDefaultInstance()
        : metricSpec_;
  }

  public static final int INSTANCE_FIELD_NUMBER = 2;
  private com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance_;
  /**
   *
   *
   * <pre>
   * Required. Question answering quality instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the instance field is set.
   */
  @java.lang.Override
  public boolean hasInstance() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering quality instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The instance.
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance getInstance() {
    return instance_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.getDefaultInstance()
        : instance_;
  }

  /**
   *
   *
   * <pre>
   * Required. Question answering quality instance.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstanceOrBuilder
      getInstanceOrBuilder() {
    return instance_ == null
        ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.getDefaultInstance()
        : instance_;
  }

  // Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getMetricSpec());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getInstance());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMetricSpec());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getInstance());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput other =
        (com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput) obj;

    if (hasMetricSpec() != other.hasMetricSpec()) return false;
    if (hasMetricSpec()) {
      if (!getMetricSpec().equals(other.getMetricSpec())) return false;
    }
    if (hasInstance() != other.hasInstance()) return false;
    if (hasInstance()) {
      if (!getInstance().equals(other.getInstance())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // memoizedHashCode caches the computed hash; 0 means "not yet computed".
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasMetricSpec()) {
      hash = (37 * hash) + METRIC_SPEC_FIELD_NUMBER;
      hash = (53 * hash) + getMetricSpec().hashCode();
    }
    if (hasInstance()) {
      hash = (37 * hash) + INSTANCE_FIELD_NUMBER;
      hash = (53 * hash) + getInstance().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Input for question answering quality metric.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.QuestionAnsweringQualityInput}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.QuestionAnsweringQualityInput)
      com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInputOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringQualityInput_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringQualityInput_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput.class,
              com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput.Builder.class);
    }

    // Construct using com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the sub-message field builders when the runtime requires it
    // (alwaysUseFieldBuilders), so nested change notifications propagate correctly.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getMetricSpecFieldBuilder();
        getInstanceFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      metricSpec_ = null;
      if (metricSpecBuilder_ != null) {
        metricSpecBuilder_.dispose();
        metricSpecBuilder_ = null;
      }
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.EvaluationServiceProto
          .internal_static_google_cloud_aiplatform_v1_QuestionAnsweringQualityInput_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput build() {
      com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput buildPartial() {
      com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput result =
          new com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies the set fields (and their presence bits) from this builder into result.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.metricSpec_ = metricSpecBuilder_ == null ? metricSpec_ : metricSpecBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.instance_ = instanceBuilder_ == null ? instance_ : instanceBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput) {
        return mergeFrom((com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput other) {
      if (other
          == com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput.getDefaultInstance())
        return this;
      if (other.hasMetricSpec()) {
        mergeMetricSpec(other.getMetricSpec());
      }
      if (other.hasInstance()) {
        mergeInstance(other.getInstance());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getMetricSpecFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getInstanceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metricSpec_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpecOrBuilder>
        metricSpecBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the metricSpec field is set.
     */
    public boolean hasMetricSpec() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The metricSpec.
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec getMetricSpec() {
      if (metricSpecBuilder_ == null) {
        return metricSpec_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.getDefaultInstance()
            : metricSpec_;
      } else {
        return metricSpecBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setMetricSpec(
        com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec value) {
      if (metricSpecBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        metricSpec_ = value;
      } else {
        metricSpecBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setMetricSpec(
        com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.Builder builderForValue) {
      if (metricSpecBuilder_ == null) {
        metricSpec_ = builderForValue.build();
      } else {
        metricSpecBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeMetricSpec(
        com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec value) {
      if (metricSpecBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)
            && metricSpec_ != null
            && metricSpec_
                != com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec
                    .getDefaultInstance()) {
          getMetricSpecBuilder().mergeFrom(value);
        } else {
          metricSpec_ = value;
        }
      } else {
        metricSpecBuilder_.mergeFrom(value);
      }
      if (metricSpec_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearMetricSpec() {
      bitField0_ = (bitField0_ & ~0x00000001);
      metricSpec_ = null;
      if (metricSpecBuilder_ != null) {
        metricSpecBuilder_.dispose();
        metricSpecBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.Builder
        getMetricSpecBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getMetricSpecFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpecOrBuilder
        getMetricSpecOrBuilder() {
      if (metricSpecBuilder_ != null) {
        return metricSpecBuilder_.getMessageOrBuilder();
      } else {
        return metricSpec_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.getDefaultInstance()
            : metricSpec_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Spec for question answering quality score metric.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec metric_spec = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpecOrBuilder>
        getMetricSpecFieldBuilder() {
      if (metricSpecBuilder_ == null) {
        metricSpecBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec,
                com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpec.Builder,
                com.google.cloud.aiplatform.v1.QuestionAnsweringQualitySpecOrBuilder>(
                getMetricSpec(), getParentForChildren(), isClean());
        metricSpec_ = null;
      }
      return metricSpecBuilder_;
    }

    private com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstanceOrBuilder>
        instanceBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the instance field is set.
     */
    public boolean hasInstance() {
      return ((bitField0_ & 0x00000002) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The instance.
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance getInstance() {
      if (instanceBuilder_ == null) {
        return instance_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.getDefaultInstance()
            : instance_;
      } else {
        return instanceBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(
        com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance value) {
      if (instanceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        instance_ = value;
      } else {
        instanceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setInstance(
        com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.Builder builderForValue) {
      if (instanceBuilder_ == null) {
        instance_ = builderForValue.build();
      } else {
        instanceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeInstance(
        com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance value) {
      if (instanceBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && instance_ != null
            && instance_
                != com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance
                    .getDefaultInstance()) {
          getInstanceBuilder().mergeFrom(value);
        } else {
          instance_ = value;
        }
      } else {
        instanceBuilder_.mergeFrom(value);
      }
      if (instance_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearInstance() {
      bitField0_ = (bitField0_ & ~0x00000002);
      instance_ = null;
      if (instanceBuilder_ != null) {
        instanceBuilder_.dispose();
        instanceBuilder_ = null;
      }
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.Builder
        getInstanceBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getInstanceFieldBuilder().getBuilder();
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstanceOrBuilder
        getInstanceOrBuilder() {
      if (instanceBuilder_ != null) {
        return instanceBuilder_.getMessageOrBuilder();
      } else {
        return instance_ == null
            ? com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.getDefaultInstance()
            : instance_;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Question answering quality instance.
     * </pre>
     *
     * <code>
     * .google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance instance = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.Builder,
            com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstanceOrBuilder>
        getInstanceFieldBuilder() {
      if (instanceBuilder_ == null) {
        instanceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance,
                com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstance.Builder,
                com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInstanceOrBuilder>(
                getInstance(), getParentForChildren(), isClean());
        instance_ = null;
      }
      return instanceBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.QuestionAnsweringQualityInput)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.QuestionAnsweringQualityInput)
  private static final com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput();
  }

  public static com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser: builds a (possibly partial) message, attaching it to any parse error.
  private static final com.google.protobuf.Parser<QuestionAnsweringQualityInput> PARSER =
      new com.google.protobuf.AbstractParser<QuestionAnsweringQualityInput>() {
        @java.lang.Override
        public QuestionAnsweringQualityInput parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<QuestionAnsweringQualityInput> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<QuestionAnsweringQualityInput> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.QuestionAnsweringQualityInput getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
oracle/coherence | 35,862 | prj/coherence-core/src/main/java/com/tangosol/net/SocketProviderFactory.java | /*
* Copyright (c) 2000, 2023, Oracle and/or its affiliates.
*
* Licensed under the Universal Permissive License v 1.0 as shown at
* https://oss.oracle.com/licenses/upl.
*/
package com.tangosol.net;
import com.oracle.coherence.common.internal.net.DemultiplexedSocketProvider;
import com.oracle.coherence.common.internal.net.MultiplexedSocketProvider;
import com.oracle.coherence.common.net.SdpSocketProvider;
import com.oracle.coherence.common.net.SocketProvider;
import com.oracle.coherence.common.net.SSLSettings;
import com.oracle.coherence.common.net.SSLSocketProvider;
import com.oracle.coherence.common.net.TcpSocketProvider;
import com.tangosol.coherence.config.builder.SSLSocketProviderDependenciesBuilder;
import com.tangosol.coherence.config.builder.SocketProviderBuilder;
import com.tangosol.config.ConfigurationException;
import com.tangosol.config.xml.ProcessingContext;
import com.tangosol.internal.net.LegacyXmlSocketProviderFactoryDependencies;
import com.tangosol.internal.net.cluster.DefaultClusterDependencies;
import com.tangosol.internal.net.ssl.LegacyXmlSSLSocketProviderDependencies;
import com.tangosol.run.xml.XmlElement;
import com.tangosol.util.SafeHashMap;
import java.io.IOException;
import java.net.MulticastSocket;
import java.util.Collections;
import java.util.Map;
/**
* The SocketProviderFactory produces SocketProviders.
*
* @author mf, jh, bb 2010.04.21
* @since Coherence 3.6
*/
public class SocketProviderFactory
{
    /**
     * Construct a SocketProviderFactory with default dependencies.
     * <p>
     * Equivalent to {@code new SocketProviderFactory(null)}; the delegated
     * constructor creates a {@code DefaultDependencies} instance internally.
     */
    public SocketProviderFactory()
        {
        this(null);
        }
/**
* Construct a SocketProviderFactory.
*
* @param dependencies SocketProviderFactory dependencies or null
*/
public SocketProviderFactory(Dependencies dependencies)
{
m_Dependencies = dependencies == null
? new DefaultDependencies()
: dependencies;
m_Dependencies.setSocketProviderFactory(this);
f_defaultSocketProviderBuilder = new SocketProviderBuilder(null, m_Dependencies, true);
}
    /**
     * Get the {@link Dependencies} this factory was configured with.
     *
     * @return the factory's {@link Dependencies}; never {@code null}, since the
     *         constructor substitutes a default when none was supplied
     */
    public Dependencies getDependencies()
        {
        return m_Dependencies;
        }
    /**
     * Return the default {@link SocketProviderBuilder}.
     * <p>
     * This is the builder created at construction time from this factory's
     * dependencies (with no explicit provider and global-provider use enabled).
     *
     * @return the default {@link SocketProviderBuilder}
     */
    public SocketProviderBuilder getDefaultSocketProviderBuilder()
        {
        return f_defaultSocketProviderBuilder;
        }
/**
 * Return a SocketProvider for legacy (non-multiplexed) consumers.  Only
 * there for Proxy till they move to use MultiplexedSocketProvider.
 *
 * @param xml  provider definition, or null for the default provider
 *
 * @return the provider
 */
@Deprecated
@SuppressWarnings("DeprecatedIsStillUsed")
public SocketProvider getLegacySocketProvider(XmlElement xml)
    {
    String sId = LegacyXmlSocketProviderFactoryDependencies.getProviderId(xml);

    if (sId == null)
        {
        return DEFAULT_LEGACY_SOCKET_PROVIDER;
        }

    Dependencies deps;
    if (UNNAMED_PROVIDER_ID.equals(sId))
        {
        // inline, anonymous definition: parse the dependencies from the xml
        LegacyXmlSocketProviderFactoryDependencies depsXml =
                new LegacyXmlSocketProviderFactoryDependencies(sId, xml);
        depsXml.setSocketProviderFactory(this);
        deps = depsXml;
        }
    else
        {
        deps = getDependencies();
        }

    // subport -1 selects the legacy (de-multiplexed) provider flavor
    return ensureSocketProvider(sId, deps, -1);
    }
/**
 * Return a {@link SocketProviderBuilder} configured from the specified xml,
 * permitting substitution of the global socket provider.
 *
 * @param xml  provider definition, or null for the default provider
 *
 * @return the {@link SocketProviderBuilder}
 */
@Deprecated
public SocketProviderBuilder getSocketProviderBuilder(XmlElement xml)
    {
    return getSocketProviderBuilder(xml, true);
    }
/**
 * Return a {@link SocketProviderBuilder} configured from the specified xml.
 *
 * @param xml            provider definition, or null for the default provider
 * @param fCanUseGlobal  {@code true} to allow use of a global provider
 *
 * @return the {@link SocketProviderBuilder}
 */
@Deprecated
public SocketProviderBuilder getSocketProviderBuilder(XmlElement xml, boolean fCanUseGlobal)
    {
    String sId = LegacyXmlSocketProviderFactoryDependencies.getProviderId(xml);

    if (sId == null)
        {
        // no provider configured: builder wraps the factory default and may
        // still fall back to the global provider if permitted
        return new SocketProviderBuilder(DEFAULT_SOCKET_PROVIDER, fCanUseGlobal);
        }

    Dependencies deps;
    if (UNNAMED_PROVIDER_ID.equals(sId))
        {
        // inline, anonymous definition: parse the dependencies from the xml
        LegacyXmlSocketProviderFactoryDependencies depsXml =
                new LegacyXmlSocketProviderFactoryDependencies(sId, xml);
        depsXml.setSocketProviderFactory(this);
        deps = depsXml;
        }
    else
        {
        deps = getDependencies();
        }

    // an explicitly configured provider never falls back to the global one
    return new SocketProviderBuilder(ensureSocketProvider(sId, deps, 0), false);
    }
/**
 * Return a Socket provider for the specified xml definition.
 *
 * @param xml  provider definition, or null for the default provider
 *
 * @return the provider
 */
@Deprecated
public SocketProvider getSocketProvider(XmlElement xml)
    {
    String sId = LegacyXmlSocketProviderFactoryDependencies.getProviderId(xml);
    if (sId == null)
        {
        // no explicit provider configured: the global provider, if one has
        // been set, takes precedence over the factory default
        if (s_globalSocketProviderBuilder != null)
            {
            return s_globalSocketProviderBuilder.realize(null, null, null);
            }
        return DEFAULT_SOCKET_PROVIDER;
        }
    else if (sId.equals(UNNAMED_PROVIDER_ID))
        {
        // inline, anonymous definition: parse its dependencies from the xml
        LegacyXmlSocketProviderFactoryDependencies depsUnnamed =
        new LegacyXmlSocketProviderFactoryDependencies(sId, xml);
        depsUnnamed.setSocketProviderFactory(this);
        return ensureSocketProvider(sId, depsUnnamed, 0);
        }
    // named provider: resolve it against this factory's dependencies
    return ensureSocketProvider(sId, getDependencies(), 0);
    }
/**
 * Return a Socket provider
 *
 * @param sId  provider name defined in {@code <socket-providers>}
 *
 * @return the provider
 */
public SocketProvider getSocketProvider(String sId)
    {
    return getSocketProvider(sId, getDependencies(), 0);
    }
/**
 * Return a Socket provider
 *
 * @param sId       provider name defined in {@code <socket-providers>}
 * @param deps      anonymous {@link SocketProviderFactory.Dependencies}
 * @param nSubport  Sub-port for De-multiplexed socket provider.
 *                  If it is 0, then it implies Multiplexed socket provider.
 *
 * @return the provider
 */
@SuppressWarnings("unused")
public SocketProvider getSocketProvider(String sId, Dependencies deps, int nSubport)
    {
    if (sId == null)
        {
        return DEFAULT_SOCKET_PROVIDER;
        }
    // NOTE(review): nSubport is accepted but not forwarded -- a literal 0
    // (multiplexed) is always passed to ensureSocketProvider.  The
    // @SuppressWarnings("unused") above suggests this may be deliberate;
    // confirm before relying on the subport parameter here.
    return ensureSocketProvider(sId, deps , 0);
    }
/**
 * Return a Demultiplexed Socket provider
 *
 * @param xml       provider definition, or null for the default provider
 * @param nSubport  subport for demultiplexed socket provider.
 *
 * @return the provider
 */
@Deprecated
@SuppressWarnings("DeprecatedIsStillUsed")
public SocketProvider getDemultiplexedSocketProvider(XmlElement xml, int nSubport)
    {
    String sId = LegacyXmlSocketProviderFactoryDependencies.getProviderId(xml);
    if (sId == null)
        {
        // no provider configured: demultiplex the default TCP provider
        return new DemultiplexedSocketProvider(TcpSocketProvider.MULTIPLEXED, nSubport);
        }
    else if (sId.equals(UNNAMED_PROVIDER_ID))
        {
        // inline, anonymous definition: parse its dependencies from the xml
        LegacyXmlSocketProviderFactoryDependencies depsUnnamed =
        new LegacyXmlSocketProviderFactoryDependencies(sId, xml);
        depsUnnamed.setSocketProviderFactory(this);
        return ensureSocketProvider(sId, depsUnnamed, nSubport);
        }
    return ensureSocketProvider(sId, getDependencies(), nSubport);
    }
/**
 * Return a Demultiplexed Socket provider
 *
 * @param sId            provider definition identifier or {@link #UNNAMED_PROVIDER_ID} for inlined,
 *                       anonymous socket provider
 * @param deps           inlined socket provider dependencies, must be non-null if {@code sId} is
 *                       set to {@link #UNNAMED_PROVIDER_ID}
 * @param nSubport       subport for demultiplexed socket provider
 * @param fCanUseGlobal  {@code true} if the global socket provider can be used
 *
 * @return a {@link DemultiplexedSocketProvider} based on method parameters.
 */
public SocketProvider getDemultiplexedSocketProvider(String sId, SocketProviderFactory.Dependencies deps,
int nSubport, boolean fCanUseGlobal)
    {
    // a configured global provider overrides both the named and default paths
    if (fCanUseGlobal && s_globalSocketProviderBuilder != null)
        {
        return s_globalSocketProviderBuilder.getDemultiplexedSocketProvider(nSubport);
        }
    return sId == null
    ? new DemultiplexedSocketProvider(TcpSocketProvider.MULTIPLEXED, nSubport)
    : ensureSocketProvider(sId, deps, nSubport);
    }
/**
 * Return a Demultiplexed Socket provider
 *
 * @param builder   use socket provider id and dependencies from this {@link SocketProviderBuilder}
 * @param nSubport  subport for demultiplexed socket provider.
 *
 * @return a {@link DemultiplexedSocketProvider} based on method parameters.
 */
public SocketProvider getDemultiplexedSocketProvider(SocketProviderBuilder builder, int nSubport)
    {
    // unpack the builder and delegate to the main overload
    return getDemultiplexedSocketProvider(builder.getId(), builder.getDependencies(),
    nSubport, builder.canUseGlobal());
    }
/**
 * Return an instance of the specified DatagramSocketProvider, creating it
 * if necessary.
 *
 * @param xml       the provider definition, or null for the default provider
 * @param nSubport  subport for a demultiplexed socket provider
 *
 * @return the provider
 */
@Deprecated
@SuppressWarnings("DeprecatedIsStillUsed")
public DatagramSocketProvider getDatagramSocketProvider(XmlElement xml, int nSubport)
    {
    String sId = LegacyXmlSocketProviderFactoryDependencies.getProviderId(xml);

    if (sId == null)
        {
        return DEFAULT_DATAGRAM_SOCKET_PROVIDER;
        }

    Dependencies deps;
    if (UNNAMED_PROVIDER_ID.equals(sId))
        {
        // inline, anonymous definition: parse the dependencies from the xml
        LegacyXmlSocketProviderFactoryDependencies depsXml =
                new LegacyXmlSocketProviderFactoryDependencies(sId, xml);
        depsXml.setSocketProviderFactory(this);
        deps = depsXml;
        }
    else
        {
        deps = getDependencies();
        }

    return ensureDatagramSocketProvider(sId, deps, nSubport);
    }
/**
 * Return an instance of the specified DatagramSocketProvider, creating it if necessary.
 *
 * @param sId            provider definition identifier or {@link #UNNAMED_PROVIDER_ID} for inlined,
 *                       anonymous socket provider
 * @param deps           inlined socket provider dependencies, must be non-null if {@code sId}
 *                       is set to {@link #UNNAMED_PROVIDER_ID}
 * @param nSubport       subport for {@link DatagramSocketProvider}.
 * @param fCanUseGlobal  {@code true} if the global socket provider can be used
 *
 * @return a {@link DatagramSocketProvider} configured via method parameters
 */
public DatagramSocketProvider getDatagramSocketProvider(String sId, SocketProviderFactory.Dependencies deps,
int nSubport, boolean fCanUseGlobal)
    {
    // a configured global provider replaces both the id and the dependencies
    if (fCanUseGlobal && s_globalSocketProviderBuilder != null)
        {
        sId = s_globalSocketProviderBuilder.getId();
        deps = s_globalSocketProviderBuilder.getDependencies();
        }
    return sId == null ? DEFAULT_DATAGRAM_SOCKET_PROVIDER : ensureDatagramSocketProvider(sId, deps, nSubport);
    }
/**
 * Return an instance of the specified DatagramSocketProvider, creating it if necessary.
 * <p>
 * NOTE(review): despite the "Default" in its name, this method simply
 * delegates to {@link #getDatagramSocketProvider}; the name looks like a
 * historical inconsistency but cannot be changed without breaking callers.
 *
 * @param builder   use socket provider id and dependencies from this {@link SocketProviderBuilder}
 * @param nSubport  subport for {@link DatagramSocketProvider}.
 *
 * @return a {@link DatagramSocketProvider} configured via method parameters
 */
@SuppressWarnings("unused")
public DatagramSocketProvider getDefaultDatagramSocketProvider(SocketProviderBuilder builder, int nSubport)
    {
    return getDatagramSocketProvider(builder.getId(), builder.getDependencies(), nSubport, builder.canUseGlobal());
    }
/**
 * Return an instance of SSLSettings from the specified xml.
 *
 * @param xml  the provider definition, or null for the default provider
 *
 * @return the sslSettings
 */
@Deprecated
public SSLSettings getSSLSettings(XmlElement xml)
    {
    String sId = LegacyXmlSocketProviderFactoryDependencies.getProviderId(xml);
    SSLSocketProvider.Dependencies depsSSL = getDependencies().getSSLDependencies(sId);
    if (depsSSL == null)
        {
        // constant-first comparison: sId is null when the xml carries no
        // provider id (the other factory methods guard this case explicitly),
        // and the previous sId.equals(...) form would throw an NPE here
        if (UNNAMED_PROVIDER_ID.equals(sId))
            {
            // inline, anonymous definition: realize the SSL dependencies
            // directly from the xml fragment
            LegacyXmlSocketProviderFactoryDependencies depsUnnamed =
            new LegacyXmlSocketProviderFactoryDependencies(sId, xml);
            depsUnnamed.setSocketProviderFactory(this);
            depsSSL = depsUnnamed.getSSLDependencies(sId);
            }
        else
            {
            depsSSL = new LegacyXmlSSLSocketProviderDependencies(xml);
            }
        }
    // copy the dependency values into a new SSLSettings
    SSLSettings settingsSSL = new SSLSettings();
    settingsSSL.setSSLContext(depsSSL.getSSLContext())
    .setClientAuth(depsSSL.getClientAuth())
    .setHostnameVerifier(depsSSL.getHostnameVerifier())
    .setEnabledCipherSuites(depsSSL.getEnabledCipherSuites())
    .setEnabledProtocolVersions(depsSSL.getEnabledProtocolVersions());
    return settingsSSL;
    }
/**
 * Return SSLSettings for the specified SocketProvider.
 *
 * @param socketProvider  the socketProvider
 *
 * @return the sslSettings if the socket provider is an instance of
 *         SSLSocketProvider, or null
 */
public SSLSettings getSSLSettings(SocketProvider socketProvider)
    {
    if (!(socketProvider instanceof SSLSocketProvider))
        {
        return null;
        }
    // copy the provider's dependency values into a fresh SSLSettings
    return createSSLSettings(((SSLSocketProvider) socketProvider).getDependencies());
    }
/**
 * Return SSLSettings for the specified SocketProviderBuilder.
 * <p>
 * NOTE(review): when dependencies exist but contain no SSL dependencies for
 * the builder's id, this returns null without trying the realize() fallback
 * used in the else branch -- confirm that is the intended behavior.
 *
 * @param builder  the socketProviderBuilder
 *
 * @return the sslSettings if the socket provider builder has ssl settings directly or via delegate.
 */
public SSLSettings getSSLSettings(SocketProviderBuilder builder)
    {
    Dependencies deps = getDependencies();
    if (deps != null)
        {
        SSLSocketProvider.Dependencies depsSSL = getDependencies().getSSLDependencies(builder.getId());
        if (depsSSL != null)
            {
            return createSSLSettings(depsSSL);
            }
        }
    else
        {
        // no dependencies: realize the provider and inspect it directly
        return getSSLSettings(builder.realize(null, null, null));
        }
    return null;
    }
/**
 * Returns the global {@link SocketProviderBuilder} or {@code null}
 * if no global provider has been set.
 *
 * @return the global {@link SocketProviderBuilder} or {@code null}
 *         if no global provider has been set
 */
public static SocketProviderBuilder getGlobalSocketProviderBuilder()
    {
    return s_globalSocketProviderBuilder;
    }
/**
 * Set the global {@link SocketProviderBuilder}.
 *
 * @param builder  the global {@link SocketProviderBuilder}, or null to clear it
 *
 * @throws IllegalArgumentException if the builder itself is flagged to use
 *         the global provider (which would recurse indefinitely)
 */
public static void setGlobalSocketProviderBuilder(SocketProviderBuilder builder)
    {
    if (builder != null && builder.canUseGlobal())
        {
        throw new IllegalArgumentException("The global socket provider builder cannot be set to also use the global provider");
        }
    s_globalSocketProviderBuilder = builder;
    }
/**
 * Set the global {@link SocketProviderBuilder}.
 * <p>
 * Convenience alias for {@link #setGlobalSocketProviderBuilder(SocketProviderBuilder)}.
 *
 * @param builder  the global {@link SocketProviderBuilder}
 */
public static void setGlobalSocketProvider(SocketProviderBuilder builder)
    {
    SocketProviderFactory.setGlobalSocketProviderBuilder(builder);
    }
// ----- Helper methods ---------------------------------------------
/**
 * Return the cluster's {@link SocketProviderFactory}.
 *
 * @param ctx  Cluster operational context
 * @param xml  socket-provider xml fragment being processed
 *
 * @return the cluster's {@link SocketProviderFactory}
 *
 * @throws ConfigurationException if neither an OperationalContext nor
 *         cluster dependencies can be resolved from the processing context
 */
public static SocketProviderFactory getSocketProviderFactory(ProcessingContext ctx, XmlElement xml)
    {
    // the operational context is the preferred source of the factory
    OperationalContext ctxOperational = ctx.getCookie(OperationalContext.class);
    if (ctxOperational != null)
        {
        return ctxOperational.getSocketProviderFactory();
        }

    // fall back to the (not yet realized) cluster dependencies
    DefaultClusterDependencies deps = ctx.getCookie(DefaultClusterDependencies.class);
    if (deps != null)
        {
        return deps.getSocketProviderFactory();
        }

    throw new ConfigurationException("Attempted to resolve the OperationalContext in [" + xml
            + "] but it was not defined", "The registered ElementHandler for the <"
            + xml.getName()
            + "> element is not operating in an OperationalContext");
    }
/**
 * Return an {@link SSLSettings} initialized via {@link SSLSocketProvider.Dependencies}.
 *
 * @param depsSSL  SSL Dependencies info
 *
 * @return a new {@link SSLSettings} initialized via <code>depsSSL</code>
 */
static public SSLSettings createSSLSettings(SSLSocketProvider.Dependencies depsSSL)
    {
    SSLSettings settings = new SSLSettings();

    // copy each dependency value individually (the setters are fluent but
    // are invoked here as plain statements)
    settings.setSSLContext(depsSSL.getSSLContext());
    settings.setClientAuth(depsSSL.getClientAuth());
    settings.setHostnameVerifier(depsSSL.getHostnameVerifier());
    settings.setEnabledCipherSuites(depsSSL.getEnabledCipherSuites());
    settings.setEnabledProtocolVersions(depsSSL.getEnabledProtocolVersions());

    return settings;
    }
/**
 * Create (or return a cached) SocketProvider.
 * <p>
 * Named providers are cached in {@link #m_mapSocketProvider} keyed by
 * {@code sId} (or {@code sId + ":" + nSubport} for demultiplexed flavors);
 * anonymous providers ({@link #UNNAMED_PROVIDER_ID}) are always rebuilt.
 * <p>
 * NOTE(review): the get/put sequence is not atomic, so two concurrent
 * callers may each build a provider for the same key; presumably harmless
 * (last write wins) but worth confirming.
 *
 * @param sId       SocketProviderId
 * @param deps      Dependencies for the given SocketProvider
 * @param nSubport  Subport for Demultiplexed socket provider.
 *                  If it is 0, then it implies Multiplexed socket provider.
 *
 * @return the SocketProvider
 *
 * @throws IllegalArgumentException if {@code sId} has no registered provider type
 */
protected SocketProvider ensureSocketProvider(String sId, Dependencies deps, int nSubport)
    {
    SocketProvider provider = null;
    String sKey = (nSubport == 0)
    ? sId
    : sId + ":" + nSubport;
    if (!sId.equals(UNNAMED_PROVIDER_ID))
        {
        // named providers are cached; anonymous ones are always rebuilt
        provider = m_mapSocketProvider.get(sKey);
        }
    if (provider == null)
        {
        Dependencies.ProviderType providerType = deps.getProviderType(sId);
        if (providerType == null)
            {
            throw new IllegalArgumentException("Unknown SocketProvider: "
            + sId);
            }
        switch (providerType)
            {
            case SYSTEM:
            case GRPC:
            case TCP:
                {
                // these three types all resolve to the multiplexed TCP provider
                provider = (nSubport == 0)
                ? TcpSocketProvider.MULTIPLEXED
                : new DemultiplexedSocketProvider(TcpSocketProvider.MULTIPLEXED, nSubport);
                break;
                }
            case SSL:
                {
                SSLSocketProvider.Dependencies depsSSL = deps.getSSLDependencies(sId);
                SocketProvider delegate = depsSSL.getDelegateSocketProvider();
                // normalize the delegate to its multiplexed singleton
                if (delegate instanceof SdpSocketProvider)
                    {
                    delegate = SdpSocketProvider.MULTIPLEXED;
                    }
                else if (delegate instanceof TcpSocketProvider)
                    {
                    delegate = TcpSocketProvider.MULTIPLEXED;
                    }
                // else it is already a multiplexed socket provider
                if (nSubport != 0)
                    {
                    // replace delegateProvider with its de-multiplexed version
                    delegate = new DemultiplexedSocketProvider(
                    (MultiplexedSocketProvider) delegate, nSubport);
                    }
                provider = new SSLSocketProvider(
                new SSLSocketProvider.DefaultDependencies(depsSSL).
                setDelegate(delegate));
                break;
                }
            case SDP:
                {
                provider = (nSubport == 0)
                ? SdpSocketProvider.MULTIPLEXED
                : new DemultiplexedSocketProvider(SdpSocketProvider.MULTIPLEXED, nSubport);
                break;
                }
            default: throw new IllegalArgumentException("Unknown Socket provider type: "+sId);
            }
        m_mapSocketProvider.put(sKey, provider);
        }
    return provider;
    }
/**
 * Create (or return a cached) {@link DatagramSocketProvider}.
 * <p>
 * Named providers are cached in {@link #m_mapDatagramSocketProvider} keyed
 * by {@code sId} (or {@code sId + ":" + nSubport}); anonymous providers
 * ({@link #UNNAMED_PROVIDER_ID}) are always rebuilt.
 * <p>
 * NOTE(review): as with ensureSocketProvider, the get/put sequence is not
 * atomic under concurrency -- duplicate construction is possible.
 *
 * @param sId           DatagramSocketProviderId
 * @param providerDeps  Dependencies for the given DatagramSocketProvider
 * @param nSubport      Subport for Demultiplexed socket provider.
 *                      If it is 0, then it implies Multiplexed socket provider.
 *
 * @return the DatagramSocketProvider
 *
 * @throws IllegalArgumentException if {@code sId} has no registered provider type
 */
protected DatagramSocketProvider ensureDatagramSocketProvider(String sId,
Dependencies providerDeps, int nSubport)
    {
    DatagramSocketProvider provider = null;
    String sKey = (nSubport == 0)
    ? sId
    : sId + ":" + nSubport;
    if (!sId.equals(UNNAMED_PROVIDER_ID))
        {
        // named providers are cached; anonymous ones are always rebuilt
        provider = m_mapDatagramSocketProvider.get(sKey);
        }
    if (provider == null)
        {
        Dependencies.ProviderType providerType = providerDeps.getProviderType(sId);
        if (providerType == null)
            {
            throw new IllegalArgumentException("Unknown DatagramSocketProvider: "
            + sId);
            }
        switch (providerType)
            {
            case SYSTEM:
                {
                provider = SystemDatagramSocketProvider.INSTANCE;
                break;
                }
            case GRPC:
            case TCP:
            case SDP:
                {
                // datagram traffic is tunnelled over the matching stream provider
                TcpDatagramSocketProvider.DefaultDependencies deps =
                new TcpDatagramSocketProvider.DefaultDependencies(
                providerDeps.getTcpDatagramSocketDependencies(sId));
                deps.setDelegateSocketProvider(
                ensureSocketProvider(sId, providerDeps, nSubport));
                provider = new TcpDatagramSocketProvider(deps);
                break;
                }
            case SSL:
                {
                TcpDatagramSocketProvider.DefaultDependencies deps =
                new TcpDatagramSocketProvider.DefaultDependencies(
                providerDeps.getTcpDatagramSocketDependencies(sId));
                deps.setDelegateSocketProvider(
                ensureSocketProvider(sId, providerDeps, nSubport));
                // anonymous subclass: multicast cannot be secured, so it is refused
                provider = new TcpDatagramSocketProvider(deps)
                    {
                    @Override
                    public MulticastSocket openMulticastSocket()
                    throws IOException
                        {
                        // We don't have a way to secure this, so we can't provide MulticastSockets
                        throw new IOException("MulticastSocket is not supported with SSL");
                        }
                    @Override
                    public boolean isSecure()
                        {
                        return true;
                        }
                    };
                break;
                }
            default: throw new IllegalArgumentException("Unknown Socket provider type: "+sId);
            }
        m_mapDatagramSocketProvider.put(sKey, provider);
        }
    return provider;
    }
// ----- Object methods -------------------------------------------------
/**
 * {@inheritDoc}
 */
@Override
public String toString()
    {
    // balanced braces and consistent "key=value" spacing; the previous form
    // emitted a trailing '}' with no matching '{' and a stray space after
    // "SocketProviderMap="
    return "SocketProviderFactory{"
        + "SocketProviderMap=" + m_mapSocketProvider
        + ", DatagramSocketProviderMap=" + m_mapDatagramSocketProvider
        + "}";
    }
/**
 * Dependencies specifies all dependency requirements of the SocketProviderFactory.
 */
public interface Dependencies
    {
    /**
     * Enumeration of Provider types.
     */
    enum ProviderType
        {
        SYSTEM ("system"),
        TCP ("tcp"),
        SSL ("ssl"),
        SDP ("sdp"),
        GRPC ("grpc-insecure");

        /**
         * Construct a ProviderType with its configuration element name.
         *
         * @param name  the name used to identify this type in configuration
         */
        ProviderType(String name)
            {
            m_sName = name;
            }

        /**
         * Return the configuration name of this provider type.
         *
         * @return the provider type name
         */
        public String getName()
            {
            return m_sName;
            }

        // the configuration name of this provider type
        final String m_sName;
        }

    /**
     * Get the provider type for the given socket provider id
     *
     * @param sId  socket provider id
     *
     * @return provider type, or null if the id is unknown
     */
    ProviderType getProviderType(String sId);

    /**
     * Get the TcpDatagramSocketProvider's dependencies associated with the given socket provider id
     *
     * @param sId  socket provider id
     *
     * @return TcpDatagramSocketProvider's dependencies
     */
    TcpDatagramSocketProvider.Dependencies getTcpDatagramSocketDependencies(String sId);

    /**
     * Get the SSLSocketProvider's dependencies associated with the given socket provider id
     *
     * @param sId  socket provider id
     *
     * @return SSLSocketProvider's dependencies
     */
    SSLSocketProvider.Dependencies getSSLDependencies(String sId);

    /**
     * Set the SocketProviderFactory referencing the Dependency object. This is
     * needed mainly to resolve delegate socket provider for SSLSocketProvider.
     *
     * @param factory  SocketProviderFactory referencing the Dependency object.
     */
    void setSocketProviderFactory(SocketProviderFactory factory);

    /**
     * Get the associated SocketProviderFactory for the Dependency object.
     *
     * @return SocketProviderFactory
     */
    SocketProviderFactory getSocketProviderFactory();
    }
/**
 * DefaultDependencies is a basic implementation of the Dependencies
 * interface.
 * <p>
 * Additionally, this class serves as a source of default dependency values.
 */
public static class DefaultDependencies
implements Dependencies
    {
    /**
     * Construct a DefaultSocketProviderDependencies object.
     * <p>
     * Registers the built-in provider types and the default TCP datagram
     * dependencies.
     */
    public DefaultDependencies()
        {
        Map<String, ProviderType> mapProvider = m_mapProvider;
        mapProvider.put(ProviderType.SYSTEM.getName(), ProviderType.SYSTEM);
        mapProvider.put(ProviderType.TCP.getName(), ProviderType.TCP);
        mapProvider.put(ProviderType.SSL.getName(), ProviderType.SSL);
        mapProvider.put(ProviderType.SDP.getName(), ProviderType.SDP);
        mapProvider.put(ProviderType.GRPC.getName(), ProviderType.GRPC);
        m_mapTCPDatagramDependencies.put(ProviderType.TCP.getName(), new TcpDatagramSocketProvider.DefaultDependencies());
        }

    /**
     * {@inheritDoc}
     */
    @Override
    public TcpDatagramSocketProvider.Dependencies getTcpDatagramSocketDependencies(String sId)
        {
        return m_mapTCPDatagramDependencies.get(sId);
        }

    /**
     * {@inheritDoc}
     * <p>
     * SSL dependencies are realized lazily: the first request for an id
     * realizes its registered builder, caches the result, and discards the
     * builder.  Synchronized to keep the realize/cache/remove step atomic.
     */
    @Override
    synchronized public SSLSocketProvider.Dependencies getSSLDependencies(String sId)
        {
        SSLSocketProvider.Dependencies deps = m_mapSSLDependencies.get(sId);
        if (deps == null)
            {
            SSLSocketProviderDependenciesBuilder bldr = m_mapSSLDependenciesBuilder.get(sId);
            if (bldr != null)
                {
                deps = bldr.realize();
                addNamedSSLDependencies(sId, deps);
                m_mapSSLDependenciesBuilder.remove(sId);
                }
            }
        return deps;
        }

    /**
     * {@inheritDoc}
     */
    @Override
    public ProviderType getProviderType(String sId)
        {
        return m_mapProvider.get(sId);
        }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setSocketProviderFactory(SocketProviderFactory factory)
        {
        m_providerFactory = factory;
        }

    /**
     * {@inheritDoc}
     * <p>
     * Lazily creates a factory bound to these dependencies if none was set.
     */
    @Override
    public SocketProviderFactory getSocketProviderFactory()
        {
        SocketProviderFactory factory = m_providerFactory;
        if (factory == null)
            {
            factory = m_providerFactory = new SocketProviderFactory(this);
            }
        return factory;
        }

    /**
     * Add {@link ProviderType} for <code>sId</code> identifier to this SocketProviderFactory's Dependency mapping.
     *
     * @param sId   provider identifier
     * @param type  {@link ProviderType}
     */
    public void addNamedProviderType(String sId, ProviderType type)
        {
        m_mapProvider.put(sId, type);
        }

    /**
     * Add {@link SSLSocketProvider.Dependencies} for <code>sId</code> identifier to this SocketProviderFactory's Dependency mapping.
     *
     * @param sId   provider identifier
     * @param deps  SSL dependencies
     */
    public void addNamedSSLDependencies(String sId, SSLSocketProvider.Dependencies deps)
        {
        m_mapSSLDependencies.put(sId, deps);
        }

    /**
     * Add {@link TcpDatagramSocketProvider.Dependencies} for <code>sId</code> identifier to this SocketProviderFactory's Dependency mapping.
     *
     * @param sId   provider identifier
     * @param deps  TcpDatagram dependencies
     */
    public void addNamedTCPDatagramDependencies(String sId, TcpDatagramSocketProvider.Dependencies deps)
        {
        m_mapTCPDatagramDependencies.put(sId, deps);
        }

    /**
     * Register a builder that lazily realizes the SSL dependencies for the
     * given provider id (see {@link #getSSLDependencies(String)}).
     *
     * @param sId   provider identifier
     * @param bldr  the SSL dependencies builder
     */
    public void addNamedSSLDependenciesBuilder(String sId, SSLSocketProviderDependenciesBuilder bldr)
        {
        m_mapSSLDependenciesBuilder.put(sId, bldr);
        }

    /**
     * Return a read-only view of the not-yet-realized SSL dependencies builders.
     *
     * @return an unmodifiable map of SSL dependencies builders, keyed by id
     */
    public Map<String, SSLSocketProviderDependenciesBuilder> getSSLDependenciesBuilderMap()
        {
        return Collections.unmodifiableMap(m_mapSSLDependenciesBuilder);
        }

    // ----- data members ---------------------------------------------------

    /**
     * A map of SSL provider dependencies, keyed by id.
     */
    protected Map<String, SSLSocketProvider.Dependencies> m_mapSSLDependencies = new SafeHashMap<>();

    /**
     * A map of SSL provider dependencies builder, keyed by id.
     * Builder is removed from this map when realized SSLDependencies is placed in {@link #m_mapSSLDependencies}
     */
    protected Map<String, SSLSocketProviderDependenciesBuilder> m_mapSSLDependenciesBuilder = new SafeHashMap<>();

    /**
     * A map of TCP Datagram provider dependencies, keyed by id.
     */
    protected Map<String, TcpDatagramSocketProvider.Dependencies> m_mapTCPDatagramDependencies = new SafeHashMap<>();

    /**
     * A map of provider types, keyed by id.
     */
    protected Map<String, ProviderType> m_mapProvider = new SafeHashMap<>();

    /**
     * SocketProviderFactory referencing this Dependency object.
     */
    protected SocketProviderFactory m_providerFactory;
    }
// ----- data members ---------------------------------------------------
/**
 * A map of instantiated socket providers, keyed by id.
 */
protected Map<String, SocketProvider> m_mapSocketProvider = new SafeHashMap<>();

/**
 * A map of instantiated datagram socket providers, keyed by id.
 */
protected Map<String, DatagramSocketProvider> m_mapDatagramSocketProvider = new SafeHashMap<>();

/**
 * The Dependencies for this factory (never null after construction).
 */
protected Dependencies m_Dependencies;

/**
 * A default {@link SocketProviderBuilder}.
 */
private final SocketProviderBuilder f_defaultSocketProviderBuilder;

// ----- constants ------------------------------------------------------

/**
 * The factory's default SocketProvider.
 */
public static final SocketProvider DEFAULT_SOCKET_PROVIDER = TcpSocketProvider.MULTIPLEXED;

/**
 * The factory's default legacy SocketProvider.
 */
public static final SocketProvider DEFAULT_LEGACY_SOCKET_PROVIDER = TcpSocketProvider.DEMULTIPLEXED;

/**
 * The factory's default Datagram SocketProvider.
 */
public static final DatagramSocketProvider DEFAULT_DATAGRAM_SOCKET_PROVIDER = SystemDatagramSocketProvider.INSTANCE;

/**
 * Default id for unnamed socket and datagram socket providers
 */
public static final String UNNAMED_PROVIDER_ID = "";

/**
 * The global socket provider builder.
 */
private static SocketProviderBuilder s_globalSocketProviderBuilder;

/**
 * The name of the system property used to set the global socket provider id.
 * NOTE(review): not referenced within this class; presumably read elsewhere
 * to seed {@link #s_globalSocketProviderBuilder} -- confirm against callers.
 */
public static final String PROP_GLOBAL_PROVIDER = "coherence.global.socketprovider";
}
|
apache/hadoop | 35,368 | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesTasks.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
import static org.apache.hadoop.yarn.webapp.WebServicesTestUtils.assertResponseStatusCode;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.StringReader;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.BadRequestException;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.util.XMLUtils;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.jupiter.api.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.glassfish.jersey.internal.inject.AbstractBinder;
import org.glassfish.jersey.jettison.JettisonFeature;
import org.glassfish.jersey.server.ResourceConfig;
/**
* Test the history server Rest API for getting tasks, a specific task,
* and task counters.
*
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}
* /ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/counters
*/
public class TestHsWebServicesTasks extends JerseyTestBase {
// shared configuration handed to the web services under test
private static Configuration conf = new Configuration();
// mock history context populated by JerseyBinder.configure()
private static MockHistoryContext appContext;
// mocked web app providing the application name
private static HsWebApp webApp;
// mocked client protocol bound into the Jersey injection context
private static ApplicationClientProtocol acp = mock(ApplicationClientProtocol.class);
/**
 * Build the Jersey test application: the HS web services resource, the
 * exception mapper, Jettison JSON support and the injection bindings.
 */
@Override
protected Application configure() {
  ResourceConfig config = new ResourceConfig();
  config.register(new JerseyBinder());
  config.register(HsWebServices.class);
  config.register(GenericExceptionHandler.class);
  config.register(new JettisonFeature()).register(JAXBContextResolver.class);
  return config;
}
/**
 * Injection bindings for the test application: a mock history context with
 * 1 app / 2 tasks / 1 attempt, a mock web app, and mocked servlet
 * request/response objects.
 */
private static class JerseyBinder extends AbstractBinder {
  @Override
  protected void configure() {
    // MockHistoryContext(numApps=0?, ...) -- arguments per the mock's contract;
    // yields one job with two tasks as asserted by the tests below
    appContext = new MockHistoryContext(0, 1, 2, 1);
    webApp = mock(HsWebApp.class);
    when(webApp.name()).thenReturn("hsmockwebapp");
    bind(webApp).to(WebApp.class).named("hsWebApp");
    bind(appContext).to(AppContext.class);
    bind(appContext).to(HistoryContext.class).named("ctx");
    bind(conf).to(Configuration.class).named("conf");
    bind(acp).to(ApplicationClientProtocol.class).named("appClient");
    final HttpServletResponse response = mock(HttpServletResponse.class);
    bind(response).to(HttpServletResponse.class);
    final HttpServletRequest request = mock(HttpServletRequest.class);
    bind(request).to(HttpServletRequest.class);
  }
}
/**
 * GET .../jobs/{jobid}/tasks with an explicit JSON Accept header:
 * expects a {"tasks":{"task":[...]}} body with two tasks per job.
 */
@Test
public void testTasks() throws JSONException, Exception {
  WebTarget r = targetWithJsonObject();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    Response response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .request(MediaType.APPLICATION_JSON).get(Response.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals(2, arr.length(), "incorrect number of elements");
    // null type => verify all tasks regardless of map/reduce
    verifyHsTask(arr, jobsMap.get(id), null);
  }
}
/**
 * GET .../tasks with no Accept header: the service should default to JSON
 * and return the same two-task payload as the explicit-JSON request.
 */
@Test
public void testTasksDefault() throws JSONException, Exception {
  WebTarget r = targetWithJsonObject();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    Response response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .request().get(Response.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONObject tasks = json.getJSONObject("tasks");
    JSONArray arr = tasks.getJSONArray("task");
    assertEquals(2, arr.length(), "incorrect number of elements");
    verifyHsTask(arr, jobsMap.get(id), null);
  }
}
/**
 * GET .../tasks/ (note trailing slash): must behave identically to the
 * slash-less path and return the two-task JSON payload.
 */
@Test
public void testTasksSlash() throws JSONException, Exception {
  WebTarget target = targetWithJsonObject();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (Map.Entry<JobId, Job> entry : jobsMap.entrySet()) {
    String jobId = MRApps.toString(entry.getKey());
    Response response = target.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks/")
        .request(MediaType.APPLICATION_JSON).get(Response.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    JSONObject json = response.readEntity(JSONObject.class);
    assertEquals(1, json.length(), "incorrect number of elements");
    JSONArray taskArray = json.getJSONObject("tasks").getJSONArray("task");
    assertEquals(2, taskArray.length(), "incorrect number of elements");
    verifyHsTask(taskArray, entry.getValue(), null);
  }
}
/**
 * GET .../tasks with an XML Accept header: parses the response with a
 * secure DocumentBuilder and verifies the &lt;tasks&gt;/&lt;task&gt; elements.
 */
@Test
public void testTasksXML() throws JSONException, Exception {
  WebTarget r = target();
  Map<JobId, Job> jobsMap = appContext.getAllJobs();
  for (JobId id : jobsMap.keySet()) {
    String jobId = MRApps.toString(id);
    Response response = r.path("ws").path("v1").path("history")
        .path("mapreduce").path("jobs").path(jobId).path("tasks")
        .request(MediaType.APPLICATION_XML).get(Response.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
        response.getMediaType().toString());
    String xml = response.readEntity(String.class);
    // XXE-hardened parser factory
    DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList tasks = dom.getElementsByTagName("tasks");
    assertEquals(1, tasks.getLength(), "incorrect number of elements");
    NodeList task = dom.getElementsByTagName("task");
    verifyHsTaskXML(task, jobsMap.get(id));
  }
}
@Test
public void testTasksQueryMap() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
String type = "m";
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks")
.queryParam("type", type).request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject tasks = json.getJSONObject("tasks");
JSONObject task = tasks.getJSONObject("task");
JSONArray arr = new JSONArray();
arr.put(task);
assertEquals(1, arr.length(), "incorrect number of elements");
verifyHsTask(arr, jobsMap.get(id), type);
}
}
@Test
public void testTasksQueryReduce() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
String type = "r";
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks")
.queryParam("type", type).request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject tasks = json.getJSONObject("tasks");
JSONObject task = tasks.getJSONObject("task");
JSONArray arr = new JSONArray();
arr.put(task);
assertEquals(1, arr.length(), "incorrect number of elements");
verifyHsTask(arr, jobsMap.get(id), type);
}
}
  /**
   * An unsupported task-type filter value ("reduce" instead of the required
   * single letters "m"/"r") must be rejected with 400 BAD_REQUEST and a
   * RemoteException JSON payload naming BadRequestException.
   */
  @Test
  public void testTasksQueryInvalid() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      // tasktype must be exactly either "m" or "r"
      String tasktype = "reduce";
      try {
        Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").queryParam("type", tasktype)
            .request(MediaType.APPLICATION_JSON).get();
        // The client does not throw on 4xx; raise the expected exception
        // ourselves so the assertions below always run in the catch block.
        throw new BadRequestException(response);
      } catch (BadRequestException ue) {
        Response response = ue.getResponse();
        assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo());
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject msg = response.readEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals(3, exception.length(), "incorrect number of elements");
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "tasktype must be either m or r", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "BadRequestException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.BadRequestException", classname);
      }
    }
  }
@Test
public void testTaskId() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("task");
verifyHsSingleTask(info, task);
}
}
}
@Test
public void testTaskIdSlash() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks")
.path(tid + "/").request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("task");
verifyHsSingleTask(info, task);
}
}
}
@Test
public void testTaskIdDefault() throws Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid).request()
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("task");
verifyHsSingleTask(info, task);
}
}
}
  /**
   * A syntactically malformed task id ("bogustaskid") must be rejected with
   * 404 NOT_FOUND and a RemoteException payload describing the id-parse
   * failure.
   */
  @Test
  public void testTaskIdBogus() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "bogustaskid";
      try {
        Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).request().get();
        // The client does not throw on 4xx; raise the expected exception
        // ourselves so the assertions below always run in the catch block.
        throw new NotFoundException(response);
      } catch (NotFoundException ue) {
        Response response = ue.getResponse();
        assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject msg = response.readEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals(3, exception.length(), "incorrect number of elements");
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        // The expected message embeds the server-side validation regex and
        // the matcher region (0,11 = length of "bogustaskid").
        WebServicesTestUtils.checkStringEqual("exception message",
            "TaskId string : " +
            "bogustaskid is not properly formed" +
            "\nReason: java.util.regex.Matcher[pattern=" +
            TaskID.TASK_ID_REGEX + " region=0,11 lastmatch=]", message);
        WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }
  /**
   * A well-formed task id that does not belong to any task must be rejected
   * with 404 NOT_FOUND and a "task not found" RemoteException payload.
   */
  @Test
  public void testTaskIdNonExist() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_0_0000_m_000000";
      try {
        Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).request().get();
        // The client does not throw on 4xx; raise the expected exception
        // ourselves so the assertions below always run in the catch block.
        throw new NotFoundException(response);
      } catch (NotFoundException ue) {
        Response response = ue.getResponse();
        assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject msg = response.readEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals(3, exception.length(), "incorrect number of elements");
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        WebServicesTestUtils.checkStringMatch("exception message",
            "task not found with id task_0_0000_m_000000", message);
        WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }
  /**
   * A task id with an invalid type letter ("d" instead of "m"/"r") must be
   * rejected with 404 NOT_FOUND and a RemoteException payload describing the
   * id-parse failure.
   */
  @Test
  public void testTaskIdInvalid() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_0_0000_d_000000";
      try {
        Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).request().get();
        // The client does not throw on 4xx; raise the expected exception
        // ourselves so the assertions below always run in the catch block.
        throw new NotFoundException(response);
      } catch (NotFoundException ue) {
        Response response = ue.getResponse();
        assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject msg = response.readEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals(3, exception.length(), "incorrect number of elements");
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        // The expected message embeds the server-side validation regex and
        // the matcher region (0,20 = length of the bad id).
        WebServicesTestUtils.checkStringEqual("exception message",
            "TaskId string : " +
            "task_0_0000_d_000000 is not properly formed" +
            "\nReason: java.util.regex.Matcher[pattern=" +
            TaskID.TASK_ID_REGEX + " region=0,20 lastmatch=]", message);
        WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }
  /**
   * A task id missing its epoch component ("task_0000_m_000000") must be
   * rejected with 404 NOT_FOUND and a RemoteException payload describing the
   * id-parse failure.
   */
  @Test
  public void testTaskIdInvalid2() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_0000_m_000000";
      try {
        Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).request().get();
        // The client does not throw on 4xx; raise the expected exception
        // ourselves so the assertions below always run in the catch block.
        throw new NotFoundException(response);
      } catch (NotFoundException ue) {
        Response response = ue.getResponse();
        assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject msg = response.readEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals(3, exception.length(), "incorrect number of elements");
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        // The expected message embeds the server-side validation regex and
        // the matcher region (0,18 = length of the bad id).
        WebServicesTestUtils.checkStringEqual("exception message",
            "TaskId string : " +
            "task_0000_m_000000 is not properly formed" +
            "\nReason: java.util.regex.Matcher[pattern=" +
            TaskID.TASK_ID_REGEX + " region=0,18 lastmatch=]", message);
        WebServicesTestUtils.checkStringMatch("exception type", "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }
  /**
   * A truncated task id missing its numeric suffix ("task_0_0000_m") must be
   * rejected with 404 NOT_FOUND and a RemoteException payload describing the
   * id-parse failure.
   */
  @Test
  public void testTaskIdInvalid3() throws JSONException, Exception {
    WebTarget r = targetWithJsonObject();
    Map<JobId, Job> jobsMap = appContext.getAllJobs();
    for (JobId id : jobsMap.keySet()) {
      String jobId = MRApps.toString(id);
      String tid = "task_0_0000_m";
      try {
        Response response = r.path("ws").path("v1").path("history").path("mapreduce").path("jobs")
            .path(jobId).path("tasks").path(tid).request().get();
        // The client does not throw on 4xx; raise the expected exception
        // ourselves so the assertions below always run in the catch block.
        throw new NotFoundException(response);
      } catch (NotFoundException ue) {
        Response response = ue.getResponse();
        assertResponseStatusCode(Response.Status.NOT_FOUND, response.getStatusInfo());
        assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
            response.getMediaType().toString());
        JSONObject msg = response.readEntity(JSONObject.class);
        JSONObject exception = msg.getJSONObject("RemoteException");
        assertEquals(3, exception.length(), "incorrect number of elements");
        String message = exception.getString("message");
        String type = exception.getString("exception");
        String classname = exception.getString("javaClassName");
        // The expected message embeds the server-side validation regex and
        // the matcher region (0,13 = length of the bad id).
        WebServicesTestUtils.checkStringEqual("exception message",
            "TaskId string : " +
            "task_0_0000_m is not properly formed" +
            "\nReason: java.util.regex.Matcher[pattern=" +
            TaskID.TASK_ID_REGEX + " region=0,13 lastmatch=]", message);
        WebServicesTestUtils.checkStringMatch("exception type",
            "NotFoundException", type);
        WebServicesTestUtils.checkStringMatch("exception classname",
            "org.apache.hadoop.yarn.webapp.NotFoundException", classname);
      }
    }
  }
@Test
public void testTaskIdXML() throws JSONException, Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.request(MediaType.APPLICATION_XML).get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("task");
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
verifyHsSingleTaskXML(element, task);
}
}
}
}
public void verifyHsSingleTask(JSONObject info, Task task)
throws JSONException {
assertEquals(9, info.length(), "incorrect number of elements");
verifyTaskGeneric(task, info.getString("id"), info.getString("state"),
info.getString("type"), info.getString("successfulAttempt"),
info.getLong("startTime"), info.getLong("finishTime"),
info.getLong("elapsedTime"), (float) info.getDouble("progress"));
}
public void verifyHsTask(JSONArray arr, Job job, String type)
throws JSONException {
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
boolean found = false;
if (type != null && task.getType() == MRApps.taskType(type)) {
for (int i = 0; i < arr.length(); i++) {
JSONObject info = arr.getJSONObject(i);
if (tid.matches(info.getString("id"))) {
found = true;
verifyHsSingleTask(info, task);
}
}
assertTrue(found, "task with id: " + tid + " not in web service output");
}
}
}
public void verifyTaskGeneric(Task task, String id, String state,
String type, String successfulAttempt, long startTime, long finishTime,
long elapsedTime, float progress) {
TaskId taskid = task.getID();
String tid = MRApps.toString(taskid);
TaskReport report = task.getReport();
WebServicesTestUtils.checkStringMatch("id", tid, id);
WebServicesTestUtils.checkStringMatch("type", task.getType().toString(),
type);
WebServicesTestUtils.checkStringMatch("state", report.getTaskState()
.toString(), state);
// not easily checked without duplicating logic, just make sure its here
assertNotNull(successfulAttempt, "successfulAttempt null");
assertEquals(report.getStartTime(), startTime, "startTime wrong");
assertEquals(report.getFinishTime(), finishTime, "finishTime wrong");
assertEquals(finishTime - startTime, elapsedTime, "elapsedTime wrong");
assertEquals(report.getProgress() * 100, progress, 1e-3f, "progress wrong");
}
public void verifyHsSingleTaskXML(Element element, Task task) {
verifyTaskGeneric(task, WebServicesTestUtils.getXmlString(element, "id"),
WebServicesTestUtils.getXmlString(element, "state"),
WebServicesTestUtils.getXmlString(element, "type"),
WebServicesTestUtils.getXmlString(element, "successfulAttempt"),
WebServicesTestUtils.getXmlLong(element, "startTime"),
WebServicesTestUtils.getXmlLong(element, "finishTime"),
WebServicesTestUtils.getXmlLong(element, "elapsedTime"),
WebServicesTestUtils.getXmlFloat(element, "progress"));
}
public void verifyHsTaskXML(NodeList nodes, Job job) {
assertEquals(2, nodes.getLength(), "incorrect number of elements");
for (Task task : job.getTasks().values()) {
TaskId id = task.getID();
String tid = MRApps.toString(id);
boolean found = false;
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
if (tid.matches(WebServicesTestUtils.getXmlString(element, "id"))) {
found = true;
verifyHsSingleTaskXML(element, task);
}
}
assertTrue(found, "task with id: " + tid + " not in web service output");
}
}
@Test
public void testTaskIdCounters() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters").request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyHsJobTaskCounters(info, task);
}
}
}
@Test
public void testTaskIdCountersSlash() throws Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters/").request(MediaType.APPLICATION_JSON)
.get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyHsJobTaskCounters(info, task);
}
}
}
@Test
public void testTaskIdCountersDefault() throws JSONException, Exception {
WebTarget r = targetWithJsonObject();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters").request().get(Response.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
JSONObject json = response.readEntity(JSONObject.class);
assertEquals(1, json.length(), "incorrect number of elements");
JSONObject info = json.getJSONObject("jobTaskCounters");
verifyHsJobTaskCounters(info, task);
}
}
}
@Test
public void testJobTaskCountersXML() throws Exception {
WebTarget r = target();
Map<JobId, Job> jobsMap = appContext.getAllJobs();
for (JobId id : jobsMap.keySet()) {
String jobId = MRApps.toString(id);
for (Task task : jobsMap.get(id).getTasks().values()) {
String tid = MRApps.toString(task.getID());
Response response = r.path("ws").path("v1").path("history")
.path("mapreduce").path("jobs").path(jobId).path("tasks").path(tid)
.path("counters").request(MediaType.APPLICATION_XML)
.get(Response.class);
assertEquals(MediaType.APPLICATION_XML_TYPE + ";" + JettyUtils.UTF_8,
response.getMediaType().toString());
String xml = response.readEntity(String.class);
DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList info = dom.getElementsByTagName("jobTaskCounters");
verifyHsTaskCountersXML(info, task);
}
}
}
public void verifyHsJobTaskCounters(JSONObject info, Task task)
throws JSONException {
assertEquals(2, info.length(), "incorrect number of elements");
WebServicesTestUtils.checkStringMatch("id", MRApps.toString(task.getID()),
info.getString("id"));
// just do simple verification of fields - not data is correct
// in the fields
JSONArray counterGroups = info.getJSONArray("taskCounterGroup");
for (int i = 0; i < counterGroups.length(); i++) {
JSONObject counterGroup = counterGroups.getJSONObject(i);
String name = counterGroup.getString("counterGroupName");
assertTrue((name != null && !name.isEmpty()), "name not set");
JSONArray counters = counterGroup.getJSONArray("counter");
for (int j = 0; j < counters.length(); j++) {
JSONObject counter = counters.getJSONObject(j);
String counterName = counter.getString("name");
assertTrue((counterName != null && !counterName.isEmpty()), "name not set");
long value = counter.getLong("value");
assertTrue(value >= 0, "value >= 0");
}
}
}
public void verifyHsTaskCountersXML(NodeList nodes, Task task) {
for (int i = 0; i < nodes.getLength(); i++) {
Element element = (Element) nodes.item(i);
WebServicesTestUtils.checkStringMatch("id",
MRApps.toString(task.getID()),
WebServicesTestUtils.getXmlString(element, "id"));
// just do simple verification of fields - not data is correct
// in the fields
NodeList groups = element.getElementsByTagName("taskCounterGroup");
for (int j = 0; j < groups.getLength(); j++) {
Element counters = (Element) groups.item(j);
assertNotNull(counters, "should have counters in the web service info");
String name = WebServicesTestUtils.getXmlString(counters,
"counterGroupName");
assertTrue((name != null && !name.isEmpty()), "name not set");
NodeList counterArr = counters.getElementsByTagName("counter");
for (int z = 0; z < counterArr.getLength(); z++) {
Element counter = (Element) counterArr.item(z);
String counterName = WebServicesTestUtils.getXmlString(counter,
"name");
assertTrue((counterName != null && !counterName.isEmpty()),
"counter name not set");
long value = WebServicesTestUtils.getXmlLong(counter, "value");
assertTrue(value >= 0, "value not >= 0");
}
}
}
}
}
|
openjdk/jdk8 | 36,070 | jdk/src/share/classes/java/text/RuleBasedCollator.java | /*
* Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* (C) Copyright Taligent, Inc. 1996, 1997 - All Rights Reserved
* (C) Copyright IBM Corp. 1996-1998 - All Rights Reserved
*
* The original version of this source code and documentation is copyrighted
* and owned by Taligent, Inc., a wholly-owned subsidiary of IBM. These
* materials are provided under terms of a License Agreement between Taligent
* and Sun. This technology is protected by multiple US and International
* patents. This notice and attribution to Taligent may not be removed.
* Taligent is a registered trademark of Taligent, Inc.
*
*/
package java.text;
import java.text.Normalizer;
import java.util.Vector;
import java.util.Locale;
/**
* The <code>RuleBasedCollator</code> class is a concrete subclass of
* <code>Collator</code> that provides a simple, data-driven, table
* collator. With this class you can create a customized table-based
* <code>Collator</code>. <code>RuleBasedCollator</code> maps
* characters to sort keys.
*
* <p>
* <code>RuleBasedCollator</code> has the following restrictions
* for efficiency (other subclasses may be used for more complex languages) :
* <ol>
* <li>If a special collation rule controlled by a <modifier> is
specified it applies to the whole collator object.
* <li>All non-mentioned characters are at the end of the
* collation order.
* </ol>
*
* <p>
* The collation table is composed of a list of collation rules, where each
* rule is of one of three forms:
* <pre>
* <modifier>
* <relation> <text-argument>
* <reset> <text-argument>
* </pre>
 * The definitions of the rule elements are as follows:
* <UL>
* <LI><strong>Text-Argument</strong>: A text-argument is any sequence of
* characters, excluding special characters (that is, common
* whitespace characters [0009-000D, 0020] and rule syntax characters
* [0021-002F, 003A-0040, 005B-0060, 007B-007E]). If those
* characters are desired, you can put them in single quotes
* (e.g. ampersand => '&'). Note that unquoted white space characters
* are ignored; e.g. <code>b c</code> is treated as <code>bc</code>.
* <LI><strong>Modifier</strong>: There are currently two modifiers that
* turn on special collation rules.
* <UL>
* <LI>'@' : Turns on backwards sorting of accents (secondary
* differences), as in French.
* <LI>'!' : Turns on Thai/Lao vowel-consonant swapping. If this
* rule is in force when a Thai vowel of the range
* \U0E40-\U0E44 precedes a Thai consonant of the range
* \U0E01-\U0E2E OR a Lao vowel of the range \U0EC0-\U0EC4
* precedes a Lao consonant of the range \U0E81-\U0EAE then
* the vowel is placed after the consonant for collation
* purposes.
* </UL>
* <p>'@' : Indicates that accents are sorted backwards, as in French.
* <LI><strong>Relation</strong>: The relations are the following:
* <UL>
* <LI>'<' : Greater, as a letter difference (primary)
* <LI>';' : Greater, as an accent difference (secondary)
* <LI>',' : Greater, as a case difference (tertiary)
* <LI>'=' : Equal
* </UL>
* <LI><strong>Reset</strong>: There is a single reset
* which is used primarily for contractions and expansions, but which
* can also be used to add a modification at the end of a set of rules.
* <p>'&' : Indicates that the next rule follows the position to where
* the reset text-argument would be sorted.
* </UL>
*
* <p>
* This sounds more complicated than it is in practice. For example, the
* following are equivalent ways of expressing the same thing:
* <blockquote>
* <pre>
* a < b < c
* a < b & b < c
* a < c & a < b
* </pre>
* </blockquote>
* Notice that the order is important, as the subsequent item goes immediately
* after the text-argument. The following are not equivalent:
* <blockquote>
* <pre>
* a < b & a < c
* a < c & a < b
* </pre>
* </blockquote>
* Either the text-argument must already be present in the sequence, or some
* initial substring of the text-argument must be present. (e.g. "a < b & ae <
* e" is valid since "a" is present in the sequence before "ae" is reset). In
* this latter case, "ae" is not entered and treated as a single character;
* instead, "e" is sorted as if it were expanded to two characters: "a"
* followed by an "e". This difference appears in natural languages: in
* traditional Spanish "ch" is treated as though it contracts to a single
* character (expressed as "c < ch < d"), while in traditional German
* a-umlaut is treated as though it expanded to two characters
 * (expressed as "a,A &lt; b,B ... &amp;ae;\u00e4&amp;AE;\u00c4").
 * [\u00e4 and \u00c4 are, of course, the escape sequences for a-umlaut.]
* <p>
* <strong>Ignorable Characters</strong>
* <p>
* For ignorable characters, the first rule must start with a relation (the
* examples we have used above are really fragments; "a < b" really should be
 * "&lt; a &lt; b"). If, however, the first relation is not "&lt;", then all the
* text-arguments up to the first "<" are ignorable. For example, ", - < a < b"
* makes "-" an ignorable character, as we saw earlier in the word
* "black-birds". In the samples for different languages, you see that most
* accents are ignorable.
*
* <p><strong>Normalization and Accents</strong>
* <p>
* <code>RuleBasedCollator</code> automatically processes its rule table to
* include both pre-composed and combining-character versions of
* accented characters. Even if the provided rule string contains only
* base characters and separate combining accent characters, the pre-composed
* accented characters matching all canonical combinations of characters from
* the rule string will be entered in the table.
* <p>
* This allows you to use a RuleBasedCollator to compare accented strings
* even when the collator is set to NO_DECOMPOSITION. There are two caveats,
* however. First, if the strings to be collated contain combining
* sequences that may not be in canonical order, you should set the collator to
* CANONICAL_DECOMPOSITION or FULL_DECOMPOSITION to enable sorting of
* combining sequences. Second, if the strings contain characters with
* compatibility decompositions (such as full-width and half-width forms),
* you must use FULL_DECOMPOSITION, since the rule tables only include
* canonical mappings.
*
* <p><strong>Errors</strong>
* <p>
* The following are errors:
* <UL>
* <LI>A text-argument contains unquoted punctuation symbols
* (e.g. "a < b-c < d").
* <LI>A relation or reset character not followed by a text-argument
* (e.g. "a < ,b").
* <LI>A reset where the text-argument (or an initial substring of the
* text-argument) is not already in the sequence.
* (e.g. "a < b & e < f")
* </UL>
* If you produce one of these errors, a <code>RuleBasedCollator</code> throws
* a <code>ParseException</code>.
*
* <p><strong>Examples</strong>
* <p>Simple: "< a < b < c < d"
* <p>Norwegian: "< a, A < b, B < c, C < d, D < e, E < f, F
* < g, G < h, H < i, I < j, J < k, K < l, L
* < m, M < n, N < o, O < p, P < q, Q < r, R
* < s, S < t, T < u, U < v, V < w, W < x, X
* < y, Y < z, Z
* < \u00E6, \u00C6
* < \u00F8, \u00D8
* < \u00E5 = a\u030A, \u00C5 = A\u030A;
* aa, AA"
*
* <p>
* To create a <code>RuleBasedCollator</code> object with specialized
* rules tailored to your needs, you construct the <code>RuleBasedCollator</code>
* with the rules contained in a <code>String</code> object. For example:
* <blockquote>
* <pre>
* String simple = "< a< b< c< d";
* RuleBasedCollator mySimple = new RuleBasedCollator(simple);
* </pre>
* </blockquote>
* Or:
* <blockquote>
* <pre>
* String Norwegian = "< a, A < b, B < c, C < d, D < e, E < f, F < g, G < h, H < i, I" +
* "< j, J < k, K < l, L < m, M < n, N < o, O < p, P < q, Q < r, R" +
* "< s, S < t, T < u, U < v, V < w, W < x, X < y, Y < z, Z" +
* "< \u00E6, \u00C6" + // Latin letter ae & AE
* "< \u00F8, \u00D8" + // Latin letter o & O with stroke
* "< \u00E5 = a\u030A," + // Latin letter a with ring above
* " \u00C5 = A\u030A;" + // Latin letter A with ring above
* " aa, AA";
* RuleBasedCollator myNorwegian = new RuleBasedCollator(Norwegian);
* </pre>
* </blockquote>
*
* <p>
* A new collation rules string can be created by concatenating rules
* strings. For example, the rules returned by {@link #getRules()} could
* be concatenated to combine multiple <code>RuleBasedCollator</code>s.
*
* <p>
* The following example demonstrates how to change the order of
* non-spacing accents,
* <blockquote>
* <pre>
* // old rule
* String oldRules = "=\u0301;\u0300;\u0302;\u0308" // main accents
* + ";\u0327;\u0303;\u0304;\u0305" // main accents
* + ";\u0306;\u0307;\u0309;\u030A" // main accents
* + ";\u030B;\u030C;\u030D;\u030E" // main accents
* + ";\u030F;\u0310;\u0311;\u0312" // main accents
* + "< a , A ; ae, AE ; \u00e6 , \u00c6"
* + "< b , B < c, C < e, E & C < d, D";
* // change the order of accent characters
* String addOn = "& \u0300 ; \u0308 ; \u0302";
* RuleBasedCollator myCollator = new RuleBasedCollator(oldRules + addOn);
* </pre>
* </blockquote>
*
* @see Collator
* @see CollationElementIterator
* @author Helena Shih, Laura Werner, Richard Gillam
*/
public class RuleBasedCollator extends Collator{
    // IMPLEMENTATION NOTES: The implementation of the collation algorithm is
    // divided across three classes: RuleBasedCollator, RBCollationTables, and
    // CollationElementIterator. RuleBasedCollator contains the collator's
    // transient state and includes the code that uses the other classes to
    // implement comparison and sort-key building. RuleBasedCollator also
    // contains the logic to handle French secondary accent sorting.
    // A RuleBasedCollator has two CollationElementIterators. State doesn't
    // need to be preserved in these objects between calls to compare() or
    // getCollationKey(), but the objects persist anyway to avoid wasting extra
    // creation time. compare() and getCollationKey() are synchronized to ensure
    // thread safety with this scheme. The CollationElementIterator is responsible
    // for generating collation elements from strings and returning one element at
    // a time (sometimes there's a one-to-many or many-to-one mapping between
    // characters and collation elements-- this class handles that).
    // CollationElementIterator depends on RBCollationTables, which contains the
    // collator's static state. RBCollationTables contains the actual data
    // tables specifying the collation order of characters for a particular locale
    // or use. It also contains the base logic that CollationElementIterator
    // uses to map from characters to collation elements. A single RBCollationTables
    // object is shared among all RuleBasedCollators for the same locale, and
    // thus by all the CollationElementIterators they create.
    /**
     * RuleBasedCollator constructor. This takes the table rules and builds
     * a collation table out of them. Please see RuleBasedCollator class
     * description for more details on the collation rule syntax.
     * @see java.util.Locale
     * @param rules the collation rules to build the collation table from.
     * @exception ParseException A format exception
     * will be thrown if the build process of the rules fails. For
     * example, build rule "a &lt; ? &lt; d" will cause the constructor to
     * throw the ParseException because the '?' is not quoted.
     */
    public RuleBasedCollator(String rules) throws ParseException {
        this(rules, Collator.CANONICAL_DECOMPOSITION);
    }
    /**
     * RuleBasedCollator constructor. This takes the table rules and builds
     * a collation table out of them. Please see RuleBasedCollator class
     * description for more details on the collation rule syntax.
     * @see java.util.Locale
     * @param rules the collation rules to build the collation table from.
     * @param decomp the decomposition strength used to build the
     * collation table and to perform comparisons.
     * @exception ParseException A format exception
     * will be thrown if the build process of the rules fails. For
     * example, build rule "a &lt; ? &lt; d" will cause the constructor to
     * throw the ParseException because the '?' is not quoted.
     */
    RuleBasedCollator(String rules, int decomp) throws ParseException {
        setStrength(Collator.TERTIARY);
        setDecomposition(decomp);
        tables = new RBCollationTables(rules, decomp);
    }
    /**
     * "Copy constructor." Used in clone() for performance.
     * Shares the (immutable) tables object instead of rebuilding it.
     */
    private RuleBasedCollator(RuleBasedCollator that) {
        setStrength(that.getStrength());
        setDecomposition(that.getDecomposition());
        tables = that.tables;
    }
    /**
     * Gets the table-based rules for the collation object.
     * @return returns the collation rules that the table collation object
     * was created from.
     */
    public String getRules()
    {
        return tables.getRules();
    }
    /**
     * Returns a CollationElementIterator for the given String.
     *
     * @param source the string to be collated
     * @return a {@code CollationElementIterator} object
     * @see java.text.CollationElementIterator
     */
    public CollationElementIterator getCollationElementIterator(String source) {
        return new CollationElementIterator( source, this );
    }
    /**
     * Returns a CollationElementIterator for the given CharacterIterator.
     *
     * @param source the character iterator to be collated
     * @return a {@code CollationElementIterator} object
     * @see java.text.CollationElementIterator
     * @since 1.2
     */
    public CollationElementIterator getCollationElementIterator(
                                                CharacterIterator source) {
        return new CollationElementIterator( source, this );
    }
    /**
     * Compares the character data stored in two different strings based on the
     * collation rules. Returns information about whether a string is less
     * than, greater than or equal to another string in a language.
     * This can be overridden in a subclass.
     *
     * @param source the source string to compare
     * @param target the target string to compare against
     * @return {@code Collator.LESS}, {@code Collator.EQUAL} or
     * {@code Collator.GREATER} depending on whether {@code source} collates
     * before, equal to, or after {@code target}
     * @exception NullPointerException if <code>source</code> or <code>target</code> is null.
     */
    public synchronized int compare(String source, String target)
    {
        if (source == null || target == null) {
            throw new NullPointerException();
        }
        // The basic algorithm here is that we use CollationElementIterators
        // to step through both the source and target strings. We compare each
        // collation element in the source string against the corresponding one
        // in the target, checking for differences.
        //
        // If a difference is found, we set <result> to LESS or GREATER to
        // indicate whether the source string is less or greater than the target.
        //
        // However, it's not that simple. If we find a tertiary difference
        // (e.g. 'A' vs. 'a') near the beginning of a string, it can be
        // overridden by a primary difference (e.g. "A" vs. "B") later in
        // the string. For example, "AA" < "aB", even though 'A' > 'a'.
        //
        // To keep track of this, we use strengthResult to keep track of the
        // strength of the most significant difference that has been found
        // so far. When we find a difference whose strength is greater than
        // strengthResult, it overrides the last difference (if any) that
        // was found.
        int result = Collator.EQUAL;
        if (sourceCursor == null) {
            sourceCursor = getCollationElementIterator(source);
        } else {
            sourceCursor.setText(source);
        }
        if (targetCursor == null) {
            targetCursor = getCollationElementIterator(target);
        } else {
            targetCursor.setText(target);
        }
        int sOrder = 0, tOrder = 0;
        boolean initialCheckSecTer = getStrength() >= Collator.SECONDARY;
        boolean checkSecTer = initialCheckSecTer;
        boolean checkTertiary = getStrength() >= Collator.TERTIARY;
        boolean gets = true, gett = true;
        while(true) {
            // Get the next collation element in each of the strings, unless
            // we've been requested to skip it.
            if (gets) sOrder = sourceCursor.next(); else gets = true;
            if (gett) tOrder = targetCursor.next(); else gett = true;
            // If we've hit the end of one of the strings, jump out of the loop
            if ((sOrder == CollationElementIterator.NULLORDER)||
                (tOrder == CollationElementIterator.NULLORDER))
                break;
            int pSOrder = CollationElementIterator.primaryOrder(sOrder);
            int pTOrder = CollationElementIterator.primaryOrder(tOrder);
            // If there's no difference at this position, we can skip it
            if (sOrder == tOrder) {
                if (tables.isFrenchSec() && pSOrder != 0) {
                    if (!checkSecTer) {
                        // in french, a secondary difference more to the right is stronger,
                        // so accents have to be checked with each base element
                        checkSecTer = initialCheckSecTer;
                        // but tertiary differences are less important than the first
                        // secondary difference, so checking tertiary remains disabled
                        checkTertiary = false;
                    }
                }
                continue;
            }
            // Compare primary differences first.
            if ( pSOrder != pTOrder )
            {
                if (sOrder == 0) {
                    // The entire source element is ignorable.
                    // Skip to the next source element, but don't fetch another target element.
                    gett = false;
                    continue;
                }
                if (tOrder == 0) {
                    // The entire target element is ignorable; skip it but keep
                    // the current source element for the next iteration.
                    gets = false;
                    continue;
                }
                // The source and target elements aren't ignorable, but it's still possible
                // for the primary component of one of the elements to be ignorable....
                if (pSOrder == 0) // primary order in source is ignorable
                {
                    // The source's primary is ignorable, but the target's isn't. We treat ignorables
                    // as a secondary difference, so remember that we found one.
                    if (checkSecTer) {
                        result = Collator.GREATER; // (strength is SECONDARY)
                        checkSecTer = false;
                    }
                    // Skip to the next source element, but don't fetch another target element.
                    gett = false;
                }
                else if (pTOrder == 0)
                {
                    // record differences - see the comment above.
                    if (checkSecTer) {
                        result = Collator.LESS; // (strength is SECONDARY)
                        checkSecTer = false;
                    }
                    // Skip to the next target element, but don't fetch another source element.
                    gets = false;
                } else {
                    // Neither of the orders is ignorable, and we already know that the primary
                    // orders are different because of the (pSOrder != pTOrder) test above.
                    // Record the difference and stop the comparison.
                    if (pSOrder < pTOrder) {
                        return Collator.LESS; // (strength is PRIMARY)
                    } else {
                        return Collator.GREATER; // (strength is PRIMARY)
                    }
                }
            } else { // else of if ( pSOrder != pTOrder )
                // primary order is the same, but complete order is different. So there
                // are no base elements at this point, only ignorables (Since the strings are
                // normalized)
                if (checkSecTer) {
                    // a secondary or tertiary difference may still matter
                    short secSOrder = CollationElementIterator.secondaryOrder(sOrder);
                    short secTOrder = CollationElementIterator.secondaryOrder(tOrder);
                    if (secSOrder != secTOrder) {
                        // there is a secondary difference
                        result = (secSOrder < secTOrder) ? Collator.LESS : Collator.GREATER;
                        // (strength is SECONDARY)
                        checkSecTer = false;
                        // (even in french, only the first secondary difference within
                        // a base character matters)
                    } else {
                        if (checkTertiary) {
                            // a tertiary difference may still matter
                            short terSOrder = CollationElementIterator.tertiaryOrder(sOrder);
                            short terTOrder = CollationElementIterator.tertiaryOrder(tOrder);
                            if (terSOrder != terTOrder) {
                                // there is a tertiary difference
                                result = (terSOrder < terTOrder) ? Collator.LESS : Collator.GREATER;
                                // (strength is TERTIARY)
                                checkTertiary = false;
                            }
                        }
                    }
                } // if (checkSecTer)
            } // if ( pSOrder != pTOrder )
        } // while()
        if (sOrder != CollationElementIterator.NULLORDER) {
            // (tOrder must be CollationElementIterator::NULLORDER,
            // since this point is only reached when sOrder or tOrder is NULLORDER.)
            // The source string has more elements, but the target string hasn't.
            do {
                if (CollationElementIterator.primaryOrder(sOrder) != 0) {
                    // We found an additional non-ignorable base character in the source string.
                    // This is a primary difference, so the source is greater
                    return Collator.GREATER; // (strength is PRIMARY)
                }
                else if (CollationElementIterator.secondaryOrder(sOrder) != 0) {
                    // Additional secondary elements mean the source string is greater
                    if (checkSecTer) {
                        result = Collator.GREATER; // (strength is SECONDARY)
                        checkSecTer = false;
                    }
                }
            } while ((sOrder = sourceCursor.next()) != CollationElementIterator.NULLORDER);
        }
        else if (tOrder != CollationElementIterator.NULLORDER) {
            // The target string has more elements, but the source string hasn't.
            do {
                if (CollationElementIterator.primaryOrder(tOrder) != 0)
                    // We found an additional non-ignorable base character in the target string.
                    // This is a primary difference, so the source is less
                    return Collator.LESS; // (strength is PRIMARY)
                else if (CollationElementIterator.secondaryOrder(tOrder) != 0) {
                    // Additional secondary elements in the target mean the source string is less
                    if (checkSecTer) {
                        result = Collator.LESS; // (strength is SECONDARY)
                        checkSecTer = false;
                    }
                }
            } while ((tOrder = targetCursor.next()) != CollationElementIterator.NULLORDER);
        }
        // For IDENTICAL comparisons, we use a bitwise character comparison
        // as a tiebreaker if all else is equal
        if (result == 0 && getStrength() == IDENTICAL) {
            int mode = getDecomposition();
            Normalizer.Form form;
            if (mode == CANONICAL_DECOMPOSITION) {
                form = Normalizer.Form.NFD;
            } else if (mode == FULL_DECOMPOSITION) {
                form = Normalizer.Form.NFKD;
            } else {
                // NO_DECOMPOSITION: compare the raw strings directly.
                return source.compareTo(target);
            }
            String sourceDecomposition = Normalizer.normalize(source, form);
            String targetDecomposition = Normalizer.normalize(target, form);
            return sourceDecomposition.compareTo(targetDecomposition);
        }
        return result;
    }
    /**
     * Transforms the string into a series of characters that can be compared
     * with CollationKey.compareTo. This overrides java.text.Collator.getCollationKey.
     * It can be overridden in a subclass.
     *
     * @param source the string to build a collation key for; may be null
     * @return a {@code CollationKey} for {@code source}, or null if
     * {@code source} is null
     */
    public synchronized CollationKey getCollationKey(String source)
    {
        //
        // The basic algorithm here is to find all of the collation elements for each
        // character in the source string, convert them to a char representation,
        // and put them into the collation key. But it's trickier than that.
        // Each collation element in a string has three components: primary (A vs B),
        // secondary (A vs A-acute), and tertiary (A' vs a); and a primary difference
        // at the end of a string takes precedence over a secondary or tertiary
        // difference earlier in the string.
        //
        // To account for this, we put all of the primary orders at the beginning of the
        // string, followed by the secondary and tertiary orders, separated by nulls.
        //
        // Here's a hypothetical example, with the collation element represented as
        // a three-digit number, one digit for primary, one for secondary, etc.
        //
        // String: A a B \u00e9 <--(e-acute)
        // Collation Elements: 101 100 201 510
        //
        // Collation Key: 1125<null>0001<null>1010
        //
        // To make things even trickier, secondary differences (accent marks) are compared
        // starting at the *end* of the string in languages with French secondary ordering.
        // But when comparing the accent marks on a single base character, they are compared
        // from the beginning. To handle this, we reverse all of the accents that belong
        // to each base character, then we reverse the entire string of secondary orderings
        // at the end. Taking the same example above, a French collator might return
        // this instead:
        //
        // Collation Key: 1125<null>1000<null>1010
        //
        if (source == null)
            return null;
        if (primResult == null) {
            primResult = new StringBuffer();
            secResult = new StringBuffer();
            terResult = new StringBuffer();
        } else {
            primResult.setLength(0);
            secResult.setLength(0);
            terResult.setLength(0);
        }
        int order = 0;
        boolean compareSec = (getStrength() >= Collator.SECONDARY);
        boolean compareTer = (getStrength() >= Collator.TERTIARY);
        int secOrder = CollationElementIterator.NULLORDER;
        int terOrder = CollationElementIterator.NULLORDER;
        int preSecIgnore = 0;
        if (sourceCursor == null) {
            sourceCursor = getCollationElementIterator(source);
        } else {
            sourceCursor.setText(source);
        }
        // walk through each character
        while ((order = sourceCursor.next()) !=
               CollationElementIterator.NULLORDER)
        {
            secOrder = CollationElementIterator.secondaryOrder(order);
            terOrder = CollationElementIterator.tertiaryOrder(order);
            if (!CollationElementIterator.isIgnorable(order))
            {
                primResult.append((char) (CollationElementIterator.primaryOrder(order)
                                    + COLLATIONKEYOFFSET));
                if (compareSec) {
                    //
                    // accumulate all of the ignorable/secondary characters attached
                    // to a given base character
                    //
                    if (tables.isFrenchSec() && preSecIgnore < secResult.length()) {
                        //
                        // We're doing reversed secondary ordering and we've hit a base
                        // (non-ignorable) character. Reverse any secondary orderings
                        // that applied to the last base character. (see block comment above.)
                        //
                        RBCollationTables.reverse(secResult, preSecIgnore, secResult.length());
                    }
                    // Remember where we are in the secondary orderings - this is how far
                    // back to go if we need to reverse them later.
                    secResult.append((char)(secOrder+ COLLATIONKEYOFFSET));
                    preSecIgnore = secResult.length();
                }
                if (compareTer) {
                    terResult.append((char)(terOrder+ COLLATIONKEYOFFSET));
                }
            }
            else
            {
                if (compareSec && secOrder != 0)
                    secResult.append((char)
                        (secOrder + tables.getMaxSecOrder() + COLLATIONKEYOFFSET));
                if (compareTer && terOrder != 0)
                    terResult.append((char)
                        (terOrder + tables.getMaxTerOrder() + COLLATIONKEYOFFSET));
            }
        }
        if (tables.isFrenchSec())
        {
            if (preSecIgnore < secResult.length()) {
                // If we've accumulated any secondary characters after the last base character,
                // reverse them.
                RBCollationTables.reverse(secResult, preSecIgnore, secResult.length());
            }
            // And now reverse the entire secResult to get French secondary ordering.
            RBCollationTables.reverse(secResult, 0, secResult.length());
        }
        primResult.append((char)0);
        secResult.append((char)0);
        secResult.append(terResult.toString());
        primResult.append(secResult.toString());
        if (getStrength() == IDENTICAL) {
            primResult.append((char)0);
            int mode = getDecomposition();
            if (mode == CANONICAL_DECOMPOSITION) {
                primResult.append(Normalizer.normalize(source, Normalizer.Form.NFD));
            } else if (mode == FULL_DECOMPOSITION) {
                primResult.append(Normalizer.normalize(source, Normalizer.Form.NFKD));
            } else {
                primResult.append(source);
            }
        }
        return new RuleBasedCollationKey(source, primResult.toString());
    }
    /**
     * Standard override; no change in semantics.
     */
    public Object clone() {
        // if we know we're not actually a subclass of RuleBasedCollator
        // (this class really should have been made final), bypass
        // Object.clone() and use our "copy constructor". This is faster.
        if (getClass() == RuleBasedCollator.class) {
            return new RuleBasedCollator(this);
        }
        else {
            RuleBasedCollator result = (RuleBasedCollator) super.clone();
            // Clear the cached cursors/buffers so the clone lazily creates
            // its own; they must not be shared between collator instances.
            result.primResult = null;
            result.secResult = null;
            result.terResult = null;
            result.sourceCursor = null;
            result.targetCursor = null;
            return result;
        }
    }
    /**
     * Compares the equality of two collation objects.
     * @param obj the table-based collation object to be compared with this.
     * @return true if the current table-based collation object is the same
     * as the table-based collation object obj; false otherwise.
     */
    public boolean equals(Object obj) {
        if (obj == null) return false;
        if (!super.equals(obj)) return false; // super does class check
        RuleBasedCollator other = (RuleBasedCollator) obj;
        // all other non-transient information is also contained in rules.
        return (getRules().equals(other.getRules()));
    }
    /**
     * Generates the hash code for the table-based collation object
     */
    public int hashCode() {
        return getRules().hashCode();
    }
    /**
     * Allows CollationElementIterator access to the tables object
     */
    RBCollationTables getTables() {
        return tables;
    }
    // ==============================================================
    // private
    // ==============================================================
    final static int CHARINDEX = 0x70000000; // need look up in .commit()
    final static int EXPANDCHARINDEX = 0x7E000000; // Expand index follows
    final static int CONTRACTCHARINDEX = 0x7F000000; // contract indexes follow
    final static int UNMAPPED = 0xFFFFFFFF;
    // Added to every order appended to a collation key so that no key char is
    // \u0000; (char)0 is reserved as the separator between the primary,
    // secondary, and tertiary sections of the key (see getCollationKey()).
    private final static int COLLATIONKEYOFFSET = 1;
    // Collation tables built from the rules; shared among all
    // RuleBasedCollators for the same locale (see implementation notes above).
    private RBCollationTables tables = null;
    // Internal objects that are cached across calls so that they don't have to
    // be created/destroyed on every call to compare() and getCollationKey()
    private StringBuffer primResult = null;
    private StringBuffer secResult = null;
    private StringBuffer terResult = null;
    private CollationElementIterator sourceCursor = null;
    private CollationElementIterator targetCursor = null;
}
|
googleapis/google-cloud-java | 35,673 | java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/CheckTrialEarlyStoppingStateMetatdata.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/vizier_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* This message will be placed in the metadata field of a
* google.longrunning.Operation associated with a CheckTrialEarlyStoppingState
* request.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata}
*/
public final class CheckTrialEarlyStoppingStateMetatdata
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata)
CheckTrialEarlyStoppingStateMetatdataOrBuilder {
private static final long serialVersionUID = 0L;
// Use CheckTrialEarlyStoppingStateMetatdata.newBuilder() to construct.
private CheckTrialEarlyStoppingStateMetatdata(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CheckTrialEarlyStoppingStateMetatdata() {
study_ = "";
trial_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CheckTrialEarlyStoppingStateMetatdata();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.VizierServiceProto
.internal_static_google_cloud_aiplatform_v1_CheckTrialEarlyStoppingStateMetatdata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.VizierServiceProto
.internal_static_google_cloud_aiplatform_v1_CheckTrialEarlyStoppingStateMetatdata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata.class,
com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata.Builder.class);
}
private int bitField0_;
public static final int GENERIC_METADATA_FIELD_NUMBER = 1;
private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return Whether the genericMetadata field is set.
*/
@java.lang.Override
public boolean hasGenericMetadata() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*
* @return The genericMetadata.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
}
/**
*
*
* <pre>
* Operation metadata for suggesting Trials.
* </pre>
*
* <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
getGenericMetadataOrBuilder() {
return genericMetadata_ == null
? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
: genericMetadata_;
}
public static final int STUDY_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object study_ = "";
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @return The study.
*/
@java.lang.Override
public java.lang.String getStudy() {
java.lang.Object ref = study_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
study_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the Study that the Trial belongs to.
* </pre>
*
* <code>string study = 2;</code>
*
* @return The bytes for study.
*/
@java.lang.Override
public com.google.protobuf.ByteString getStudyBytes() {
java.lang.Object ref = study_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
study_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TRIAL_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object trial_ = "";
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @return The trial.
*/
@java.lang.Override
public java.lang.String getTrial() {
java.lang.Object ref = trial_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
trial_ = s;
return s;
}
}
/**
*
*
* <pre>
* The Trial name.
* </pre>
*
* <code>string trial = 3;</code>
*
* @return The bytes for trial.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTrialBytes() {
java.lang.Object ref = trial_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
trial_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getGenericMetadata());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(study_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, study_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(trial_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, trial_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(study_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, study_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(trial_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, trial_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata other =
(com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata) obj;
if (hasGenericMetadata() != other.hasGenericMetadata()) return false;
if (hasGenericMetadata()) {
if (!getGenericMetadata().equals(other.getGenericMetadata())) return false;
}
if (!getStudy().equals(other.getStudy())) return false;
if (!getTrial().equals(other.getTrial())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasGenericMetadata()) {
hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER;
hash = (53 * hash) + getGenericMetadata().hashCode();
}
hash = (37 * hash) + STUDY_FIELD_NUMBER;
hash = (53 * hash) + getStudy().hashCode();
hash = (37 * hash) + TRIAL_FIELD_NUMBER;
hash = (53 * hash) + getTrial().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
  /**
   *
   *
   * <pre>
   * This message will be placed in the metadata field of a
   * google.longrunning.Operation associated with a CheckTrialEarlyStoppingState
   * request.
   * </pre>
   *
   * Protobuf type {@code google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata)
      com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdataOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.VizierServiceProto
          .internal_static_google_cloud_aiplatform_v1_CheckTrialEarlyStoppingStateMetatdata_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.VizierServiceProto
          .internal_static_google_cloud_aiplatform_v1_CheckTrialEarlyStoppingStateMetatdata_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata.class,
              com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata.Builder.class);
    }

    // Construct using
    // com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested-message field builder when the runtime requests it
    // (alwaysUseFieldBuilders is set in some runtime configurations).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getGenericMetadataFieldBuilder();
      }
    }

    // Resets every field to its default and drops any cached sub-builder.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      genericMetadata_ = null;
      if (genericMetadataBuilder_ != null) {
        genericMetadataBuilder_.dispose();
        genericMetadataBuilder_ = null;
      }
      study_ = "";
      trial_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.VizierServiceProto
          .internal_static_google_cloud_aiplatform_v1_CheckTrialEarlyStoppingStateMetatdata_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata build() {
      com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata buildPartial() {
      com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata result =
          new com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set from this builder into result.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.genericMetadata_ =
            genericMetadataBuilder_ == null ? genericMetadata_ : genericMetadataBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.study_ = study_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.trial_ = trial_;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata) {
        return mergeFrom(
            (com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Field-wise merge: set (non-default) fields of `other` overwrite or merge into this builder.
    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata other) {
      if (other
          == com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
              .getDefaultInstance()) return this;
      if (other.hasGenericMetadata()) {
        mergeGenericMetadata(other.getGenericMetadata());
      }
      if (!other.getStudy().isEmpty()) {
        study_ = other.study_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getTrial().isEmpty()) {
        trial_ = other.trial_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this message, so any state is "initialized".
      return true;
    }

    // Wire tags: 10 = field 1 (generic_metadata, message), 18 = field 2 (study, string),
    // 26 = field 3 (trial, string); tag 0 marks end of stream.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getGenericMetadataFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                study_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                trial_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Presence-bit mask: 0x1 = generic_metadata, 0x2 = study, 0x4 = trial.
    private int bitField0_;

    private com.google.cloud.aiplatform.v1.GenericOperationMetadata genericMetadata_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        genericMetadataBuilder_;
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return Whether the genericMetadata field is set.
     */
    public boolean hasGenericMetadata() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     *
     * @return The genericMetadata.
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata getGenericMetadata() {
      if (genericMetadataBuilder_ == null) {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      } else {
        return genericMetadataBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        genericMetadata_ = value;
      } else {
        genericMetadataBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder setGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder builderForValue) {
      if (genericMetadataBuilder_ == null) {
        genericMetadata_ = builderForValue.build();
      } else {
        genericMetadataBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder mergeGenericMetadata(
        com.google.cloud.aiplatform.v1.GenericOperationMetadata value) {
      if (genericMetadataBuilder_ == null) {
        // Merge into the existing message only when one is already set and non-default;
        // otherwise simply adopt `value`.
        if (((bitField0_ & 0x00000001) != 0)
            && genericMetadata_ != null
            && genericMetadata_
                != com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()) {
          getGenericMetadataBuilder().mergeFrom(value);
        } else {
          genericMetadata_ = value;
        }
      } else {
        genericMetadataBuilder_.mergeFrom(value);
      }
      if (genericMetadata_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public Builder clearGenericMetadata() {
      bitField0_ = (bitField0_ & ~0x00000001);
      genericMetadata_ = null;
      if (genericMetadataBuilder_ != null) {
        genericMetadataBuilder_.dispose();
        genericMetadataBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder
        getGenericMetadataBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getGenericMetadataFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    public com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder
        getGenericMetadataOrBuilder() {
      if (genericMetadataBuilder_ != null) {
        return genericMetadataBuilder_.getMessageOrBuilder();
      } else {
        return genericMetadata_ == null
            ? com.google.cloud.aiplatform.v1.GenericOperationMetadata.getDefaultInstance()
            : genericMetadata_;
      }
    }
    /**
     *
     *
     * <pre>
     * Operation metadata for suggesting Trials.
     * </pre>
     *
     * <code>.google.cloud.aiplatform.v1.GenericOperationMetadata generic_metadata = 1;</code>
     */
    // Lazily creates the single-field builder; once created, the plain field is nulled
    // and the builder becomes the source of truth.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.aiplatform.v1.GenericOperationMetadata,
            com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
            com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>
        getGenericMetadataFieldBuilder() {
      if (genericMetadataBuilder_ == null) {
        genericMetadataBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.aiplatform.v1.GenericOperationMetadata,
                com.google.cloud.aiplatform.v1.GenericOperationMetadata.Builder,
                com.google.cloud.aiplatform.v1.GenericOperationMetadataOrBuilder>(
                getGenericMetadata(), getParentForChildren(), isClean());
        genericMetadata_ = null;
      }
      return genericMetadataBuilder_;
    }

    // Stored as String or ByteString; converted (and cached) on access.
    private java.lang.Object study_ = "";
    /**
     *
     *
     * <pre>
     * The name of the Study that the Trial belongs to.
     * </pre>
     *
     * <code>string study = 2;</code>
     *
     * @return The study.
     */
    public java.lang.String getStudy() {
      java.lang.Object ref = study_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        study_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the Study that the Trial belongs to.
     * </pre>
     *
     * <code>string study = 2;</code>
     *
     * @return The bytes for study.
     */
    public com.google.protobuf.ByteString getStudyBytes() {
      java.lang.Object ref = study_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        study_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The name of the Study that the Trial belongs to.
     * </pre>
     *
     * <code>string study = 2;</code>
     *
     * @param value The study to set.
     * @return This builder for chaining.
     */
    public Builder setStudy(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      study_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the Study that the Trial belongs to.
     * </pre>
     *
     * <code>string study = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearStudy() {
      study_ = getDefaultInstance().getStudy();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The name of the Study that the Trial belongs to.
     * </pre>
     *
     * <code>string study = 2;</code>
     *
     * @param value The bytes for study to set.
     * @return This builder for chaining.
     */
    public Builder setStudyBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      study_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    // Stored as String or ByteString; converted (and cached) on access.
    private java.lang.Object trial_ = "";
    /**
     *
     *
     * <pre>
     * The Trial name.
     * </pre>
     *
     * <code>string trial = 3;</code>
     *
     * @return The trial.
     */
    public java.lang.String getTrial() {
      java.lang.Object ref = trial_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        trial_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The Trial name.
     * </pre>
     *
     * <code>string trial = 3;</code>
     *
     * @return The bytes for trial.
     */
    public com.google.protobuf.ByteString getTrialBytes() {
      java.lang.Object ref = trial_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        trial_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The Trial name.
     * </pre>
     *
     * <code>string trial = 3;</code>
     *
     * @param value The trial to set.
     * @return This builder for chaining.
     */
    public Builder setTrial(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      trial_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Trial name.
     * </pre>
     *
     * <code>string trial = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTrial() {
      trial_ = getDefaultInstance().getTrial();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Trial name.
     * </pre>
     *
     * <code>string trial = 3;</code>
     *
     * @param value The bytes for trial to set.
     * @return This builder for chaining.
     */
    public Builder setTrialBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      trial_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata)
  // Singleton default (all-fields-unset) instance shared by every getDefaultInstance() call.
  private static final com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata();
  }

  public static com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parses by building a fresh Builder from the stream; on failure, attaches the
  // partially-built message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CheckTrialEarlyStoppingStateMetatdata> PARSER =
      new com.google.protobuf.AbstractParser<CheckTrialEarlyStoppingStateMetatdata>() {
        @java.lang.Override
        public CheckTrialEarlyStoppingStateMetatdata parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<CheckTrialEarlyStoppingStateMetatdata> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<CheckTrialEarlyStoppingStateMetatdata> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.CheckTrialEarlyStoppingStateMetatdata
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/sdk-platform-java | 35,697 | java-common-protos/proto-google-common-protos/src/main/java/com/google/cloud/location/ListLocationsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/location/locations.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.location;
/**
*
*
* <pre>
* The response message for [Locations.ListLocations][google.cloud.location.Locations.ListLocations].
* </pre>
*
* Protobuf type {@code google.cloud.location.ListLocationsResponse}
*/
public final class ListLocationsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.location.ListLocationsResponse)
ListLocationsResponseOrBuilder {
private static final long serialVersionUID = 0L;
  // Use ListLocationsResponse.newBuilder() to construct.
  private ListLocationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: empty locations list and empty page token.
  private ListLocationsResponse() {
    locations_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }

  // Reflection hook used by the protobuf runtime to allocate instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListLocationsResponse();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.location.LocationsProto
        .internal_static_google_cloud_location_ListLocationsResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.location.LocationsProto
        .internal_static_google_cloud_location_ListLocationsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.location.ListLocationsResponse.class,
            com.google.cloud.location.ListLocationsResponse.Builder.class);
  }
  public static final int LOCATIONS_FIELD_NUMBER = 1;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.location.Location> locations_;
  /**
   *
   *
   * <pre>
   * A list of locations that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.location.Location locations = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.location.Location> getLocationsList() {
    return locations_;
  }
  /**
   *
   *
   * <pre>
   * A list of locations that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.location.Location locations = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.location.LocationOrBuilder>
      getLocationsOrBuilderList() {
    return locations_;
  }
  /**
   *
   *
   * <pre>
   * A list of locations that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.location.Location locations = 1;</code>
   */
  @java.lang.Override
  public int getLocationsCount() {
    return locations_.size();
  }
  /**
   *
   *
   * <pre>
   * A list of locations that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.location.Location locations = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.location.Location getLocations(int index) {
    return locations_.get(index);
  }
  /**
   *
   *
   * <pre>
   * A list of locations that matches the specified filter in the request.
   * </pre>
   *
   * <code>repeated .google.cloud.location.Location locations = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.location.LocationOrBuilder getLocationsOrBuilder(int index) {
    return locations_.get(index);
  }

  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Stored as String or ByteString; converted (and cached) on first access in either direction.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * The standard List next-page token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The standard List next-page token.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // No required fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes field 1 (repeated locations), then field 2 (next_page_token, skipped when
  // empty), then any unknown fields, in tag order as the wire format requires.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < locations_.size(); i++) {
      output.writeMessage(1, locations_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the serialized byte length; must mirror writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < locations_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, locations_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Structural equality over locations, next_page_token, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.location.ListLocationsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.location.ListLocationsResponse other =
        (com.google.cloud.location.ListLocationsResponse) obj;

    if (!getLocationsList().equals(other.getLocationsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash combines the descriptor, each set field (keyed by field number), and unknown
  // fields; result is memoized (0 is treated as "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getLocationsCount() > 0) {
      hash = (37 * hash) + LOCATIONS_FIELD_NUMBER;
      hash = (53 * hash) + getLocationsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER (optionally with an
  // extension registry) or to the GeneratedMessageV3 IO helpers for stream inputs.
  public static com.google.cloud.location.ListLocationsResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.location.ListLocationsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.location.ListLocationsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.location.ListLocationsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.location.ListLocationsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response message for [Locations.ListLocations][google.cloud.location.Locations.ListLocations].
* </pre>
*
* Protobuf type {@code google.cloud.location.ListLocationsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.location.ListLocationsResponse)
com.google.cloud.location.ListLocationsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.location.LocationsProto
          .internal_static_google_cloud_location_ListLocationsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.location.LocationsProto
          .internal_static_google_cloud_location_ListLocationsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.location.ListLocationsResponse.class,
              com.google.cloud.location.ListLocationsResponse.Builder.class);
    }

    // Construct using com.google.cloud.location.ListLocationsResponse.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets both fields; bit 0x1 tracks locations mutability, bit 0x2 tracks next_page_token.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (locationsBuilder_ == null) {
        locations_ = java.util.Collections.emptyList();
      } else {
        locations_ = null;
        locationsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.location.LocationsProto
          .internal_static_google_cloud_location_ListLocationsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.location.ListLocationsResponse getDefaultInstanceForType() {
      return com.google.cloud.location.ListLocationsResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.location.ListLocationsResponse build() {
      com.google.cloud.location.ListLocationsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.location.ListLocationsResponse buildPartial() {
      com.google.cloud.location.ListLocationsResponse result =
          new com.google.cloud.location.ListLocationsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Freezes the locations list (wraps it unmodifiable and clears the mutability bit)
    // before handing it to the built message.
    private void buildPartialRepeatedFields(
        com.google.cloud.location.ListLocationsResponse result) {
      if (locationsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          locations_ = java.util.Collections.unmodifiableList(locations_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.locations_ = locations_;
      } else {
        result.locations_ = locationsBuilder_.build();
      }
    }

    // Copies the scalar field (next_page_token) into result when its presence bit is set.
    private void buildPartial0(com.google.cloud.location.ListLocationsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the type-specific merge when possible; otherwise fall back to the
      // reflective field-by-field merge in the base class.
      if (other instanceof com.google.cloud.location.ListLocationsResponse) {
        return mergeFrom((com.google.cloud.location.ListLocationsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Merges `other` into this builder: repeated fields are concatenated,
    // set scalar fields overwrite ours.
    public Builder mergeFrom(com.google.cloud.location.ListLocationsResponse other) {
      if (other == com.google.cloud.location.ListLocationsResponse.getDefaultInstance())
        return this;
      if (locationsBuilder_ == null) {
        // Plain-list mode: adopt other's list wholesale when ours is empty
        // (clearing the ownership bit keeps it copy-on-write), otherwise
        // append its elements to our private list.
        if (!other.locations_.isEmpty()) {
          if (locations_.isEmpty()) {
            locations_ = other.locations_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureLocationsIsMutable();
            locations_.addAll(other.locations_);
          }
          onChanged();
        }
      } else {
        if (!other.locations_.isEmpty()) {
          if (locationsBuilder_.isEmpty()) {
            // Discard the empty field builder and adopt other's list directly;
            // re-create the builder only when the runtime mandates builders.
            locationsBuilder_.dispose();
            locationsBuilder_ = null;
            locations_ = other.locations_;
            bitField0_ = (bitField0_ & ~0x00000001);
            locationsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getLocationsFieldBuilder()
                    : null;
          } else {
            locationsBuilder_.addAllMessages(other.locations_);
          }
        }
      }
      // Scalar merge: only copy next_page_token when other has it set.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // This message declares no required fields, so any state is valid.
      return true;
    }
    // Wire-format parse loop: reads tagged fields until EOF (tag 0) or an
    // end-group tag, merging each into this builder.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            case 10:
              {
                // Field 1 (locations), length-delimited Location message.
                com.google.cloud.location.Location m =
                    input.readMessage(
                        com.google.cloud.location.Location.parser(), extensionRegistry);
                if (locationsBuilder_ == null) {
                  ensureLocationsIsMutable();
                  locations_.add(m);
                } else {
                  locationsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                // Field 2 (next_page_token), UTF-8-validated string.
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify listeners even after a partial parse.
        onChanged();
      } // finally
      return this;
    }
    // Bit 0x00000001: locations_ is a privately owned, mutable copy.
    // Bit 0x00000002: nextPageToken_ has been explicitly set.
    private int bitField0_;
    // Backing list while no field builder exists; starts as the shared
    // immutable empty list.
    private java.util.List<com.google.cloud.location.Location> locations_ =
        java.util.Collections.emptyList();
    // Copy-on-write guard: before any mutation, replace a shared/immutable
    // locations_ with a private ArrayList and mark it owned (bit 0x1).
    private void ensureLocationsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        locations_ = new java.util.ArrayList<com.google.cloud.location.Location>(locations_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created by getLocationsFieldBuilder(); once non-null it owns the
    // element list and locations_ is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.location.Location,
            com.google.cloud.location.Location.Builder,
            com.google.cloud.location.LocationOrBuilder>
        locationsBuilder_;
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
public java.util.List<com.google.cloud.location.Location> getLocationsList() {
if (locationsBuilder_ == null) {
return java.util.Collections.unmodifiableList(locations_);
} else {
return locationsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
public int getLocationsCount() {
if (locationsBuilder_ == null) {
return locations_.size();
} else {
return locationsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Returns the element at `index` from whichever store currently owns it.
    public com.google.cloud.location.Location getLocations(int index) {
      if (locationsBuilder_ == null) {
        return locations_.get(index);
      } else {
        return locationsBuilder_.getMessage(index);
      }
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Replaces the element at `index`; null values are rejected. The plain-list
    // path must copy-on-write before mutating.
    public Builder setLocations(int index, com.google.cloud.location.Location value) {
      if (locationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureLocationsIsMutable();
        locations_.set(index, value);
        onChanged();
      } else {
        locationsBuilder_.setMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Builder-valued overload: builds the sub-message and stores it at `index`.
    public Builder setLocations(
        int index, com.google.cloud.location.Location.Builder builderForValue) {
      if (locationsBuilder_ == null) {
        ensureLocationsIsMutable();
        locations_.set(index, builderForValue.build());
        onChanged();
      } else {
        locationsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Appends a non-null element to the repeated field.
    public Builder addLocations(com.google.cloud.location.Location value) {
      if (locationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureLocationsIsMutable();
        locations_.add(value);
        onChanged();
      } else {
        locationsBuilder_.addMessage(value);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Inserts a non-null element at `index`, shifting later elements.
    public Builder addLocations(int index, com.google.cloud.location.Location value) {
      if (locationsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureLocationsIsMutable();
        locations_.add(index, value);
        onChanged();
      } else {
        locationsBuilder_.addMessage(index, value);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Appends the built form of `builderForValue`.
    public Builder addLocations(com.google.cloud.location.Location.Builder builderForValue) {
      if (locationsBuilder_ == null) {
        ensureLocationsIsMutable();
        locations_.add(builderForValue.build());
        onChanged();
      } else {
        locationsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Inserts the built form of `builderForValue` at `index`.
    public Builder addLocations(
        int index, com.google.cloud.location.Location.Builder builderForValue) {
      if (locationsBuilder_ == null) {
        ensureLocationsIsMutable();
        locations_.add(index, builderForValue.build());
        onChanged();
      } else {
        locationsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Appends every element of `values`; the protobuf helper performs the
    // per-element null checks on the plain-list path.
    public Builder addAllLocations(
        java.lang.Iterable<? extends com.google.cloud.location.Location> values) {
      if (locationsBuilder_ == null) {
        ensureLocationsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, locations_);
        onChanged();
      } else {
        locationsBuilder_.addAllMessages(values);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Empties the repeated field; resets to the shared empty list and clears
    // the ownership bit so a later mutation re-copies.
    public Builder clearLocations() {
      if (locationsBuilder_ == null) {
        locations_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        locationsBuilder_.clear();
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Removes the element at `index`, shifting later elements down.
    public Builder removeLocations(int index) {
      if (locationsBuilder_ == null) {
        ensureLocationsIsMutable();
        locations_.remove(index);
        onChanged();
      } else {
        locationsBuilder_.remove(index);
      }
      return this;
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Forces creation of the field builder and returns a mutable sub-builder
    // for the element at `index`.
    public com.google.cloud.location.Location.Builder getLocationsBuilder(int index) {
      return getLocationsFieldBuilder().getBuilder(index);
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Read-only view of the element at `index` (message or live builder).
    public com.google.cloud.location.LocationOrBuilder getLocationsOrBuilder(int index) {
      if (locationsBuilder_ == null) {
        return locations_.get(index);
      } else {
        return locationsBuilder_.getMessageOrBuilder(index);
      }
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
public java.util.List<? extends com.google.cloud.location.LocationOrBuilder>
getLocationsOrBuilderList() {
if (locationsBuilder_ != null) {
return locationsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(locations_);
}
}
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Appends a default-valued element and returns its mutable sub-builder.
    public com.google.cloud.location.Location.Builder addLocationsBuilder() {
      return getLocationsFieldBuilder()
          .addBuilder(com.google.cloud.location.Location.getDefaultInstance());
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Inserts a default-valued element at `index` and returns its sub-builder.
    public com.google.cloud.location.Location.Builder addLocationsBuilder(int index) {
      return getLocationsFieldBuilder()
          .addBuilder(index, com.google.cloud.location.Location.getDefaultInstance());
    }
/**
*
*
* <pre>
* A list of locations that matches the specified filter in the request.
* </pre>
*
* <code>repeated .google.cloud.location.Location locations = 1;</code>
*/
    // Mutable sub-builders for every element (forces field-builder mode).
    public java.util.List<com.google.cloud.location.Location.Builder> getLocationsBuilderList() {
      return getLocationsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3, transferring ownership of the
    // current list to it; locations_ is nulled afterwards so the builder is
    // the single source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.location.Location,
            com.google.cloud.location.Location.Builder,
            com.google.cloud.location.LocationOrBuilder>
        getLocationsFieldBuilder() {
      if (locationsBuilder_ == null) {
        locationsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.location.Location,
                com.google.cloud.location.Location.Builder,
                com.google.cloud.location.LocationOrBuilder>(
                locations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        locations_ = null;
      }
      return locationsBuilder_;
    }
    // Holds either a String or a ByteString; decoded lazily by the getters.
    private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // First String access: decode the UTF-8 ByteString once and cache it.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // First bytes access: encode the String once and cache the ByteString.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
    // Sets next_page_token (non-null) and marks it set via bit 0x2.
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
    // Resets next_page_token to the default instance's value and clears bit 0x2.
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* The standard List next-page token.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
    // Byte-level setter; validates the bytes are well-formed UTF-8 first.
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling delegates entirely to the base class.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.location.ListLocationsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.location.ListLocationsResponse)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.cloud.location.ListLocationsResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.location.ListLocationsResponse();
  }
  // Returns the shared all-defaults instance.
  public static com.google.cloud.location.ListLocationsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser: delegates to Builder.mergeFrom and, on failure, attaches
  // whatever was parsed so far as the "unfinished message".
  private static final com.google.protobuf.Parser<ListLocationsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListLocationsResponse>() {
        @java.lang.Override
        public ListLocationsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors for the shared parser / default instance.
  public static com.google.protobuf.Parser<ListLocationsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListLocationsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.location.ListLocationsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1beta1/reasoning_engine_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1beta1;
/**
*
*
* <pre>
* Request message for
* [ReasoningEngineService.ListReasoningEngines][google.cloud.aiplatform.v1beta1.ReasoningEngineService.ListReasoningEngines].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest}
*/
public final class ListReasoningEnginesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest)
ListReasoningEnginesRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListReasoningEnginesRequest.newBuilder() to construct.
  // All builders funnel through this constructor so the base class can
  // capture builder state (including unknown fields).
  private ListReasoningEnginesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: string fields start as the empty string.
  private ListReasoningEnginesRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }
  // Called reflectively by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListReasoningEnginesRequest();
  }
  // Descriptor plumbing: links this class to the proto file's metadata.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_ListReasoningEnginesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto
        .internal_static_google_cloud_aiplatform_v1beta1_ListReasoningEnginesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest.class,
            com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily by getParent().
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the Location to list the ReasoningEngines
* from. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Location to list the ReasoningEngines
* from. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode once and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int FILTER_FIELD_NUMBER = 2;

  // Holds either a String or a ByteString; decoded lazily by getFilter().
  @SuppressWarnings("serial")
  private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
  @java.lang.Override
  public java.lang.String getFilter() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 ByteString once and cache the String back.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      filter_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getFilterBytes() {
    java.lang.Object ref = filter_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString representation.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      filter_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  // proto3 int32; 0 is the unset/default value.
  private int pageSize_ = 0;
/**
*
*
* <pre>
* Optional. The standard list page size.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }
  public static final int PAGE_TOKEN_FIELD_NUMBER = 4;

  // Holds either a String or a ByteString; decoded lazily by getPageToken().
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 ByteString once and cache the String back.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString representation.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields: the first computed answer is always true.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes in field-number order; proto3 semantics skip default values.
  // Unknown fields retained from parsing are appended last.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize; -1 means "not yet computed") the
  // encoded byte size, mirroring writeTo's skip-defaults logic.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: compares every declared field plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest other =
        (com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over descriptor + each field, memoized (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // parseFrom overloads for in-memory sources (ByteBuffer, ByteString,
  // byte[]), each with and without an extension registry; all delegate to
  // the shared PARSER.
  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  // parseFrom overloads for streaming sources; the "Delimited" variants read a
  // length-prefixed message (see writeDelimitedTo on the writer side).
  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Builder factories: a fresh builder is a toBuilder() of the default
  // instance; the prototype overload pre-populates it via mergeFrom.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge for the default instance: an empty builder is equivalent.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [ReasoningEngineService.ListReasoningEngines][google.cloud.aiplatform.v1beta1.ReasoningEngineService.ListReasoningEngines].
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest)
com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequestOrBuilder {
    // Builder-side descriptor plumbing; mirrors the outer class's accessors.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListReasoningEnginesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListReasoningEnginesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest.class,
              com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest.Builder.class);
    }
    // Construct using com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest.newBuilder()
    private Builder() {}

    // Parented variant: registers with `parent` for change notifications.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset the per-field "set" bits and all values to proto3 defaults.
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1beta1.ReasoningEngineServiceProto
          .internal_static_google_cloud_aiplatform_v1beta1_ListReasoningEnginesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest build() {
      // Assemble, then enforce the initialization contract before returning.
      com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest buildPartial() {
      // Builds without the isInitialized() check; only explicitly-set fields
      // (per bitField0_) are copied over.
      com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest result =
          new com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields into `result`.
    // Bits: 0x1 parent, 0x2 filter, 0x4 page_size, 0x8 page_token.
    private void buildPartial0(
        com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.filter_ = filter_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    // The reflective field mutators below simply delegate to the
    // GeneratedMessageV3.Builder base class.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest) {
return mergeFrom((com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest other) {
if (other
== com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getFilter().isEmpty()) {
filter_ = other.filter_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000008;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields directly off the wire. Wire tags map to fields as:
// tag 10 (field 1, length-delimited) = parent; tag 18 (field 2) = filter;
// tag 24 (field 3, varint) = page_size; tag 34 (field 4) = page_token.
// Anything else is preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
filter_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000004;
break;
} // case 24
case 34:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000008;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// onChanged runs even on error so listeners see any fields parsed before the failure.
onChanged();
} // finally
return this;
}
// Presence bits for the builder's fields (0x1=parent, 0x2=filter, 0x4=pageSize, 0x8=pageToken).
private int bitField0_;
// Stored as either java.lang.String or ByteString; converted lazily and the
// converted form is cached back into the field (standard protobuf dual representation).
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The resource name of the Location to list the ReasoningEngines
* from. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Location to list the ReasoningEngines
* from. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
// Cache the encoded ByteString for subsequent calls.
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name of the Location to list the ReasoningEngines
* from. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the Location to list the ReasoningEngines
* from. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name of the Location to list the ReasoningEngines
* from. Format: `projects/{project}/locations/{location}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Rejects byte sequences that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Dual String/ByteString representation, same caching scheme as parent_ above.
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list filter.
* More detail in [AIP-160](https://google.aip.dev/160).
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Rejects byte sequences that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
// Plain int; presence is tracked with bit 0x4 in bitField0_, not a sentinel value.
private int pageSize_;
/**
*
*
* <pre>
* Optional. The standard list page size.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Optional. The standard list page size.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list page size.
* </pre>
*
* <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000004);
pageSize_ = 0;
onChanged();
return this;
}
// Dual String/ByteString representation, same caching scheme as parent_ above.
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. The standard list page token.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Rejects byte sequences that are not valid UTF-8 before storing.
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
// Final overrides that pin unknown-field handling to the superclass implementation.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest)
// Singleton default instance; also serves as the template for newBuilder().
private static final com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest();
}
public static com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser that funnels all parsing through the builder's mergeFrom; on failure it
// attaches whatever was parsed so far as the "unfinished message" on the exception.
private static final com.google.protobuf.Parser<ListReasoningEnginesRequest> PARSER =
new com.google.protobuf.AbstractParser<ListReasoningEnginesRequest>() {
@java.lang.Override
public ListReasoningEnginesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protobuf exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListReasoningEnginesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListReasoningEnginesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1beta1.ListReasoningEnginesRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,682 | java-alloydb/proto-google-cloud-alloydb-v1/src/main/java/com/google/cloud/alloydb/v1/MigrationSource.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/alloydb/v1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.alloydb.v1;
/**
*
*
* <pre>
* Subset of the source instance configuration that is available when reading
* the cluster resource.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1.MigrationSource}
*/
public final class MigrationSource extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1.MigrationSource)
MigrationSourceOrBuilder {
private static final long serialVersionUID = 0L;
// Use MigrationSource.newBuilder() to construct.
private MigrationSource(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor initializes proto3 defaults (empty strings, enum number 0).
private MigrationSource() {
hostPort_ = "";
referenceId_ = "";
sourceType_ = 0;
}
// Called reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MigrationSource();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1.ResourcesProto
.internal_static_google_cloud_alloydb_v1_MigrationSource_descriptor;
}
// Wires descriptor-based reflection to this class's fields.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1.ResourcesProto
.internal_static_google_cloud_alloydb_v1_MigrationSource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1.MigrationSource.class,
com.google.cloud.alloydb.v1.MigrationSource.Builder.class);
}
/**
*
*
* <pre>
* Denote the type of migration source that created this cluster.
* </pre>
*
* Protobuf enum {@code google.cloud.alloydb.v1.MigrationSource.MigrationSourceType}
*/
public enum MigrationSourceType implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Migration source is unknown.
* </pre>
*
* <code>MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0;</code>
*/
MIGRATION_SOURCE_TYPE_UNSPECIFIED(0),
/**
*
*
* <pre>
* DMS source means the cluster was created via DMS migration job.
* </pre>
*
* <code>DMS = 1;</code>
*/
DMS(1),
// Sentinel for wire values not known to this generated code; carries no number.
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Migration source is unknown.
* </pre>
*
* <code>MIGRATION_SOURCE_TYPE_UNSPECIFIED = 0;</code>
*/
public static final int MIGRATION_SOURCE_TYPE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* DMS source means the cluster was created via DMS migration job.
* </pre>
*
* <code>DMS = 1;</code>
*/
public static final int DMS_VALUE = 1;
// Throws for UNRECOGNIZED, which has no defined wire number.
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static MigrationSourceType valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value, or {@code null} if unknown.
*/
public static MigrationSourceType forNumber(int value) {
switch (value) {
case 0:
return MIGRATION_SOURCE_TYPE_UNSPECIFIED;
case 1:
return DMS;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<MigrationSourceType>() {
public MigrationSourceType findValueByNumber(int number) {
return MigrationSourceType.forNumber(number);
}
};
// Throws for UNRECOGNIZED, which has no descriptor entry.
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.alloydb.v1.MigrationSource.getDescriptor().getEnumTypes().get(0);
}
// Snapshot of values() cached once; indexed by descriptor position below.
private static final MigrationSourceType[] VALUES = values();
public static MigrationSourceType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private MigrationSourceType(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.alloydb.v1.MigrationSource.MigrationSourceType)
}
public static final int HOST_PORT_FIELD_NUMBER = 1;
// Dual String/ByteString representation; volatile because the cached conversion may be
// written from multiple threads (benign race -- both representations are equivalent).
@SuppressWarnings("serial")
private volatile java.lang.Object hostPort_ = "";
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The hostPort.
*/
@java.lang.Override
public java.lang.String getHostPort() {
java.lang.Object ref = hostPort_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
// Cache the decoded String so subsequent calls skip the UTF-8 conversion.
hostPort_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. The host and port of the on-premises instance in host:port
* format
* </pre>
*
* <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for hostPort.
*/
@java.lang.Override
public com.google.protobuf.ByteString getHostPortBytes() {
java.lang.Object ref = hostPort_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
hostPort_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REFERENCE_ID_FIELD_NUMBER = 2;
// Same dual-representation caching scheme as hostPort_.
@SuppressWarnings("serial")
private volatile java.lang.Object referenceId_ = "";
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The referenceId.
*/
@java.lang.Override
public java.lang.String getReferenceId() {
java.lang.Object ref = referenceId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
referenceId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Output only. Place holder for the external source identifier(e.g DMS job
* name) that created the cluster.
* </pre>
*
* <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
*
* @return The bytes for referenceId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getReferenceIdBytes() {
java.lang.Object ref = referenceId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
referenceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SOURCE_TYPE_FIELD_NUMBER = 3;
// Enum stored as its raw wire number so unknown values survive a round trip.
private int sourceType_ = 0;
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The enum numeric value on the wire for sourceType.
*/
@java.lang.Override
public int getSourceTypeValue() {
return sourceType_;
}
/**
*
*
* <pre>
* Output only. Type of migration source.
* </pre>
*
* <code>
* .google.cloud.alloydb.v1.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
* </code>
*
* @return The sourceType, or {@code UNRECOGNIZED} if the stored number is unknown.
*/
@java.lang.Override
public com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType getSourceType() {
com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType result =
com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType.forNumber(sourceType_);
return result == null
? com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType.UNRECOGNIZED
: result;
}
// -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Proto3 serialization: fields at their default value (empty string, enum number 0)
// are omitted from the output entirely.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostPort_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, hostPort_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(referenceId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, referenceId_);
}
if (sourceType_
!= com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType
.MIGRATION_SOURCE_TYPE_UNSPECIFIED
.getNumber()) {
output.writeEnum(3, sourceType_);
}
getUnknownFields().writeTo(output);
}
// Mirrors writeTo's field-skipping logic; the computed size is memoized.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(hostPort_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, hostPort_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(referenceId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, referenceId_);
}
if (sourceType_
!= com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType
.MIGRATION_SOURCE_TYPE_UNSPECIFIED
.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, sourceType_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Value equality over all three fields plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.alloydb.v1.MigrationSource)) {
return super.equals(obj);
}
com.google.cloud.alloydb.v1.MigrationSource other =
(com.google.cloud.alloydb.v1.MigrationSource) obj;
if (!getHostPort().equals(other.getHostPort())) return false;
if (!getReferenceId().equals(other.getReferenceId())) return false;
if (sourceType_ != other.sourceType_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash is memoized; it folds in the descriptor plus each field tagged by its number.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + HOST_PORT_FIELD_NUMBER;
hash = (53 * hash) + getHostPort().hashCode();
hash = (37 * hash) + REFERENCE_ID_FIELD_NUMBER;
hash = (53 * hash) + getReferenceId().hashCode();
hash = (37 * hash) + SOURCE_TYPE_FIELD_NUMBER;
hash = (53 * hash) + sourceType_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parseFrom surface: in-memory overloads delegate to PARSER directly;
// stream overloads go through GeneratedMessageV3 helpers that translate I/O errors.
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.alloydb.v1.MigrationSource parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.alloydb.v1.MigrationSource parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.alloydb.v1.MigrationSource prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
// Avoids an unnecessary mergeFrom when converting the default instance itself.
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Subset of the source instance configuration that is available when reading
* the cluster resource.
* </pre>
*
* Protobuf type {@code google.cloud.alloydb.v1.MigrationSource}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1.MigrationSource)
com.google.cloud.alloydb.v1.MigrationSourceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.alloydb.v1.ResourcesProto
.internal_static_google_cloud_alloydb_v1_MigrationSource_descriptor;
}
// Wires descriptor-based reflection to this builder's fields.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.alloydb.v1.ResourcesProto
.internal_static_google_cloud_alloydb_v1_MigrationSource_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.alloydb.v1.MigrationSource.class,
com.google.cloud.alloydb.v1.MigrationSource.Builder.class);
}
// Construct using com.google.cloud.alloydb.v1.MigrationSource.newBuilder()
private Builder() {}
// Parented constructor used by the runtime so nested-builder changes propagate upward.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all three fields to their proto3 defaults and clears every presence bit.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
hostPort_ = "";
referenceId_ = "";
sourceType_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.alloydb.v1.ResourcesProto
.internal_static_google_cloud_alloydb_v1_MigrationSource_descriptor;
}
@java.lang.Override
public com.google.cloud.alloydb.v1.MigrationSource getDefaultInstanceForType() {
return com.google.cloud.alloydb.v1.MigrationSource.getDefaultInstance();
}
// Builds the message, throwing if uninitialized (never for this proto3 message).
@java.lang.Override
public com.google.cloud.alloydb.v1.MigrationSource build() {
com.google.cloud.alloydb.v1.MigrationSource result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.alloydb.v1.MigrationSource buildPartial() {
com.google.cloud.alloydb.v1.MigrationSource result =
new com.google.cloud.alloydb.v1.MigrationSource(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers only fields whose presence bit is set:
// 0x1 = hostPort, 0x2 = referenceId, 0x4 = sourceType.
private void buildPartial0(com.google.cloud.alloydb.v1.MigrationSource result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.hostPort_ = hostPort_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.referenceId_ = referenceId_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.sourceType_ = sourceType_;
}
}
    // The overrides below are plain delegations to GeneratedMessageV3.Builder.
    // They exist so the generated builder exposes a covariant `Builder` return
    // type for method chaining instead of the generic superclass type.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; otherwise fall back to the reflective
      // field-by-field merge in the superclass.
      if (other instanceof com.google.cloud.alloydb.v1.MigrationSource) {
        return mergeFrom((com.google.cloud.alloydb.v1.MigrationSource) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: per proto3 semantics, only fields set to a non-default value
    // in `other` overwrite this builder's fields.
    public Builder mergeFrom(com.google.cloud.alloydb.v1.MigrationSource other) {
      if (other == com.google.cloud.alloydb.v1.MigrationSource.getDefaultInstance()) return this;
      if (!other.getHostPort().isEmpty()) {
        hostPort_ = other.hostPort_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getReferenceId().isEmpty()) {
        referenceId_ = other.referenceId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.sourceType_ != 0) {
        setSourceTypeValue(other.getSourceTypeValue());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 messages have no required fields, so they are always initialized.
      return true;
    }
    // Parses fields directly off the wire into this builder. Tag values are
    // (field_number << 3) | wire_type: 10 = field 1 (length-delimited),
    // 18 = field 2 (length-delimited), 24 = field 3 (varint).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                hostPort_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                referenceId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                sourceType_ = input.readEnum();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify the parent even on failure so partially-read state is observed.
        onChanged();
      } // finally
      return this;
    }
    // Tracks which fields have been explicitly set on this builder
    // (bit 0 = hostPort, bit 1 = referenceId, bit 2 = sourceType).
    private int bitField0_;

    // Stored as either String or ByteString; converted lazily and cached.
    private java.lang.Object hostPort_ = "";
    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The hostPort.
     */
    public java.lang.String getHostPort() {
      java.lang.Object ref = hostPort_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and memoize the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        hostPort_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for hostPort.
     */
    public com.google.protobuf.ByteString getHostPortBytes() {
      java.lang.Object ref = hostPort_;
      if (ref instanceof String) {
        // Encode the cached String once and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        hostPort_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The hostPort to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setHostPort(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      hostPort_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearHostPort() {
      hostPort_ = getDefaultInstance().getHostPort();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. The host and port of the on-premises instance in host:port
     * format
     * </pre>
     *
     * <code>string host_port = 1 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for hostPort to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setHostPortBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
      checkByteStringIsUtf8(value);
      hostPort_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Stored as either String or ByteString; converted lazily and cached.
    private java.lang.Object referenceId_ = "";
    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The referenceId.
     */
    public java.lang.String getReferenceId() {
      java.lang.Object ref = referenceId_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and memoize the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        referenceId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return The bytes for referenceId.
     */
    public com.google.protobuf.ByteString getReferenceIdBytes() {
      java.lang.Object ref = referenceId_;
      if (ref instanceof String) {
        // Encode the cached String once and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        referenceId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The referenceId to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setReferenceId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      referenceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearReferenceId() {
      referenceId_ = getDefaultInstance().getReferenceId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Place holder for the external source identifier(e.g DMS job
     * name) that created the cluster.
     * </pre>
     *
     * <code>string reference_id = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code>
     *
     * @param value The bytes for referenceId to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setReferenceIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
      checkByteStringIsUtf8(value);
      referenceId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Enum fields are stored as their raw wire (int) value so unknown enum
    // numbers received from newer servers are preserved round-trip.
    private int sourceType_ = 0;
    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The enum numeric value on the wire for sourceType.
     */
    @java.lang.Override
    public int getSourceTypeValue() {
      return sourceType_;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The enum numeric value on the wire for sourceType to set.
     * @return This builder for chaining.
     */
    public Builder setSourceTypeValue(int value) {
      sourceType_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return The sourceType, or {@code UNRECOGNIZED} if the stored wire value
     *     does not map to a known enum constant.
     */
    @java.lang.Override
    public com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType getSourceType() {
      com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType result =
          com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType.forNumber(sourceType_);
      return result == null
          ? com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType.UNRECOGNIZED
          : result;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @param value The sourceType to set.
     * @return This builder for chaining.
     * @throws NullPointerException if {@code value} is null.
     */
    public Builder setSourceType(
        com.google.cloud.alloydb.v1.MigrationSource.MigrationSourceType value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      sourceType_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Output only. Type of migration source.
     * </pre>
     *
     * <code>
     * .google.cloud.alloydb.v1.MigrationSource.MigrationSourceType source_type = 3 [(.google.api.field_behavior) = OUTPUT_ONLY];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSourceType() {
      bitField0_ = (bitField0_ & ~0x00000004);
      sourceType_ = 0;
      onChanged();
      return this;
    }
    // Covariant-return delegations for unknown-field handling.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1.MigrationSource)
}
// @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1.MigrationSource)
  // Singleton default instance shared by all callers; fields hold proto3 defaults.
  private static final com.google.cloud.alloydb.v1.MigrationSource DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1.MigrationSource();
  }

  public static com.google.cloud.alloydb.v1.MigrationSource getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Shared parser. Parses via a fresh builder and attaches the partially-built
  // message to any parse exception so callers can inspect what was read.
  private static final com.google.protobuf.Parser<MigrationSource> PARSER =
      new com.google.protobuf.AbstractParser<MigrationSource>() {
        @java.lang.Override
        public MigrationSource parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<MigrationSource> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<MigrationSource> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.alloydb.v1.MigrationSource getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,653 | java-document-ai/proto-google-cloud-document-ai-v1beta3/src/main/java/com/google/cloud/documentai/v1beta3/ReviewDocumentResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/documentai/v1beta3/document_processor_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.documentai.v1beta3;
/**
*
*
* <pre>
* Response message for the
* [ReviewDocument][google.cloud.documentai.v1beta3.DocumentProcessorService.ReviewDocument]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1beta3.ReviewDocumentResponse}
*/
public final class ReviewDocumentResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.ReviewDocumentResponse)
ReviewDocumentResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ReviewDocumentResponse.newBuilder() to construct.
  // Use ReviewDocumentResponse.newBuilder() to construct.
  private ReviewDocumentResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; initializes proto3 defaults.
  private ReviewDocumentResponse() {
    gcsDestination_ = "";
    state_ = 0;
    rejectionReason_ = "";
  }

  // Runtime hook used by the protobuf library (e.g. during deserialization) to
  // create fresh instances without going through a builder.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ReviewDocumentResponse();
  }
  // Descriptor and reflection accessor table both live in the generated
  // DocumentAiProcessorService file-level class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_ReviewDocumentResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_ReviewDocumentResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.class,
            com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.Builder.class);
  }
/**
*
*
* <pre>
* Possible states of the review operation.
* </pre>
*
* Protobuf enum {@code google.cloud.documentai.v1beta3.ReviewDocumentResponse.State}
*/
  public enum State implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * The default value. This value is used if the state is omitted.
     * </pre>
     *
     * <code>STATE_UNSPECIFIED = 0;</code>
     */
    STATE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * The review operation is rejected by the reviewer.
     * </pre>
     *
     * <code>REJECTED = 1;</code>
     */
    REJECTED(1),
    /**
     *
     *
     * <pre>
     * The review operation is succeeded.
     * </pre>
     *
     * <code>SUCCEEDED = 2;</code>
     */
    SUCCEEDED(2),
    // Sentinel for wire values not known to this generated code version.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * The default value. This value is used if the state is omitted.
     * </pre>
     *
     * <code>STATE_UNSPECIFIED = 0;</code>
     */
    public static final int STATE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * The review operation is rejected by the reviewer.
     * </pre>
     *
     * <code>REJECTED = 1;</code>
     */
    public static final int REJECTED_VALUE = 1;
    /**
     *
     *
     * <pre>
     * The review operation is succeeded.
     * </pre>
     *
     * <code>SUCCEEDED = 2;</code>
     */
    public static final int SUCCEEDED_VALUE = 2;

    // Returns the wire value; UNRECOGNIZED deliberately has no stable number.
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static State valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value, or
     *     {@code null} for unknown values.
     */
    public static State forNumber(int value) {
      switch (value) {
        case 0:
          return STATE_UNSPECIFIED;
        case 1:
          return REJECTED;
        case 2:
          return SUCCEEDED;
        default:
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<State> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<State> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<State>() {
          public State findValueByNumber(int number) {
            return State.forNumber(number);
          }
        };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      // UNRECOGNIZED has no descriptor entry because it is not in the .proto file.
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.getDescriptor()
          .getEnumTypes()
          .get(0);
    }

    private static final State[] VALUES = values();

    public static State valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // The numeric wire value backing this constant.
    private final int value;

    private State(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.documentai.v1beta3.ReviewDocumentResponse.State)
  }
  public static final int GCS_DESTINATION_FIELD_NUMBER = 1;

  // Stored as either String or ByteString; converted lazily and cached.
  // volatile so the memoized form is visible across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object gcsDestination_ = "";
  /**
   *
   *
   * <pre>
   * The Cloud Storage uri for the human reviewed document if the review is
   * succeeded.
   * </pre>
   *
   * <code>string gcs_destination = 1;</code>
   *
   * @return The gcsDestination.
   */
  @java.lang.Override
  public java.lang.String getGcsDestination() {
    java.lang.Object ref = gcsDestination_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      gcsDestination_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The Cloud Storage uri for the human reviewed document if the review is
   * succeeded.
   * </pre>
   *
   * <code>string gcs_destination = 1;</code>
   *
   * @return The bytes for gcsDestination.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getGcsDestinationBytes() {
    java.lang.Object ref = gcsDestination_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      gcsDestination_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int STATE_FIELD_NUMBER = 2;

  // Stored as the raw wire (int) value so unknown enum numbers survive round-trip.
  private int state_ = 0;
  /**
   *
   *
   * <pre>
   * The state of the review operation.
   * </pre>
   *
   * <code>.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State state = 2;</code>
   *
   * @return The enum numeric value on the wire for state.
   */
  @java.lang.Override
  public int getStateValue() {
    return state_;
  }

  /**
   *
   *
   * <pre>
   * The state of the review operation.
   * </pre>
   *
   * <code>.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State state = 2;</code>
   *
   * @return The state, or {@code UNRECOGNIZED} if the stored wire value does
   *     not map to a known enum constant.
   */
  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State getState() {
    com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State result =
        com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State.forNumber(state_);
    return result == null
        ? com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State.UNRECOGNIZED
        : result;
  }
  public static final int REJECTION_REASON_FIELD_NUMBER = 3;

  // Stored as either String or ByteString; converted lazily and cached.
  @SuppressWarnings("serial")
  private volatile java.lang.Object rejectionReason_ = "";
  /**
   *
   *
   * <pre>
   * The reason why the review is rejected by reviewer.
   * </pre>
   *
   * <code>string rejection_reason = 3;</code>
   *
   * @return The rejectionReason.
   */
  @java.lang.Override
  public java.lang.String getRejectionReason() {
    java.lang.Object ref = rejectionReason_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      rejectionReason_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The reason why the review is rejected by reviewer.
   * </pre>
   *
   * <code>string rejection_reason = 3;</code>
   *
   * @return The bytes for rejectionReason.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRejectionReasonBytes() {
    java.lang.Object ref = rejectionReason_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      rejectionReason_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = unknown, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 has no required fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes in ascending field-number order, skipping fields at their
  // proto3 default value, then appends any preserved unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsDestination_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, gcsDestination_);
    }
    if (state_
        != com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State.STATE_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(2, state_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rejectionReason_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, rejectionReason_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes the wire size with the same default-skipping rules as writeTo,
  // memoizing the result (memoizedSize == -1 means "not yet computed").
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(gcsDestination_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, gcsDestination_);
    }
    if (state_
        != com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State.STATE_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(2, state_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rejectionReason_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, rejectionReason_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality, including preserved unknown fields. The enum is
  // compared by raw wire value so UNRECOGNIZED values compare correctly.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.documentai.v1beta3.ReviewDocumentResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.documentai.v1beta3.ReviewDocumentResponse other =
        (com.google.cloud.documentai.v1beta3.ReviewDocumentResponse) obj;

    if (!getGcsDestination().equals(other.getGcsDestination())) return false;
    if (state_ != other.state_) return false;
    if (!getRejectionReason().equals(other.getRejectionReason())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash is memoized (0 means "not yet computed") and mixes each field number
  // with its value, consistent with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + GCS_DESTINATION_FIELD_NUMBER;
    hash = (53 * hash) + getGcsDestination().hashCode();
    hash = (37 * hash) + STATE_FIELD_NUMBER;
    hash = (53 * hash) + state_;
    hash = (37 * hash) + REJECTION_REASON_FIELD_NUMBER;
    hash = (53 * hash) + getRejectionReason().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. All delegate to the shared PARSER;
  // the stream-based variants route through GeneratedMessageV3 helpers that
  // translate IOExceptions into InvalidProtocolBufferException where needed.
  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(
      com.google.cloud.documentai.v1beta3.ReviewDocumentResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; anything else merges itself in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message for the
* [ReviewDocument][google.cloud.documentai.v1beta3.DocumentProcessorService.ReviewDocument]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.documentai.v1beta3.ReviewDocumentResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.ReviewDocumentResponse)
com.google.cloud.documentai.v1beta3.ReviewDocumentResponseOrBuilder {
    // Descriptor and reflection accessor table come from the generated
    // DocumentAiProcessorService file-level class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_ReviewDocumentResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_ReviewDocumentResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.class,
              com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.Builder.class);
    }

    // Construct using com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.newBuilder()
    private Builder() {}

    // Invoked by the runtime when this builder is nested inside a parent builder.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset all has-bits and restore every field to its proto3 default value.
      bitField0_ = 0;
      gcsDestination_ = "";
      state_ = 0;
      rejectionReason_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
          .internal_static_google_cloud_documentai_v1beta3_ReviewDocumentResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.ReviewDocumentResponse getDefaultInstanceForType() {
      return com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.ReviewDocumentResponse build() {
      com.google.cloud.documentai.v1beta3.ReviewDocumentResponse result = buildPartial();
      // Always satisfied in proto3 (no required fields); kept for proto2 API parity.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.documentai.v1beta3.ReviewDocumentResponse buildPartial() {
      com.google.cloud.documentai.v1beta3.ReviewDocumentResponse result =
          new com.google.cloud.documentai.v1beta3.ReviewDocumentResponse(this);
      // Only copy builder state into the message when at least one field was set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each field whose has-bit is set from the builder into the message.
    private void buildPartial0(com.google.cloud.documentai.v1beta3.ReviewDocumentResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.gcsDestination_ = gcsDestination_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.state_ = state_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.rejectionReason_ = rejectionReason_;
      }
    }
    // Covariant-return delegations to GeneratedMessageV3.Builder for the
    // generic reflection-based field accessors.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; otherwise reflective merge via superclass.
      if (other instanceof com.google.cloud.documentai.v1beta3.ReviewDocumentResponse) {
        return mergeFrom((com.google.cloud.documentai.v1beta3.ReviewDocumentResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
public Builder mergeFrom(com.google.cloud.documentai.v1beta3.ReviewDocumentResponse other) {
if (other == com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.getDefaultInstance())
return this;
if (!other.getGcsDestination().isEmpty()) {
gcsDestination_ = other.gcsDestination_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.state_ != 0) {
setStateValue(other.getStateValue());
}
if (!other.getRejectionReason().isEmpty()) {
rejectionReason_ = other.rejectionReason_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire format into this builder. Wire tags map to
// fields as: 10 -> gcs_destination (field 1, string), 16 -> state (field 2,
// enum varint), 26 -> rejection_reason (field 3, string). Unrecognized tags
// are preserved as unknown fields.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        case 10:
          {
            gcsDestination_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 16:
          {
            state_ = input.readEnum();
            bitField0_ |= 0x00000002;
            break;
          } // case 16
        case 26:
          {
            rejectionReason_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 26
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify listeners even when parsing fails partway, since some fields
    // may already have been written into the builder.
    onChanged();
  } // finally
  return this;
}
// Presence bits: 0x1 = gcs_destination, 0x2 = state, 0x4 = rejection_reason.
private int bitField0_;

// Stored either as String or as ByteString; getters lazily convert and
// cache the decoded form (standard protobuf string-field representation).
private java.lang.Object gcsDestination_ = "";
/**
 * The Cloud Storage uri for the human reviewed document if the review is
 * succeeded.
 *
 * <code>string gcs_destination = 1;</code>
 *
 * @return The gcsDestination.
 */
public java.lang.String getGcsDestination() {
  java.lang.Object ref = gcsDestination_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so later calls skip the UTF-8 conversion.
    gcsDestination_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * The Cloud Storage uri for the human reviewed document if the review is
 * succeeded.
 *
 * <code>string gcs_destination = 1;</code>
 *
 * @return The bytes for gcsDestination.
 */
public com.google.protobuf.ByteString getGcsDestinationBytes() {
  java.lang.Object ref = gcsDestination_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString form.
    gcsDestination_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * The Cloud Storage uri for the human reviewed document if the review is
 * succeeded.
 *
 * <code>string gcs_destination = 1;</code>
 *
 * @param value The gcsDestination to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setGcsDestination(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  gcsDestination_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * The Cloud Storage uri for the human reviewed document if the review is
 * succeeded.
 *
 * <code>string gcs_destination = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearGcsDestination() {
  // Reset to the field's default value and clear its presence bit.
  gcsDestination_ = getDefaultInstance().getGcsDestination();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 * The Cloud Storage uri for the human reviewed document if the review is
 * succeeded.
 *
 * <code>string gcs_destination = 1;</code>
 *
 * @param value The bytes for gcsDestination to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setGcsDestinationBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
  checkByteStringIsUtf8(value);
  gcsDestination_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Enum stored as its wire number so unrecognized values survive round-trips.
private int state_ = 0;
/**
 * The state of the review operation.
 *
 * <code>.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State state = 2;</code>
 *
 * @return The enum numeric value on the wire for state.
 */
@java.lang.Override
public int getStateValue() {
  return state_;
}
/**
 * The state of the review operation.
 *
 * <code>.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State state = 2;</code>
 *
 * @param value The enum numeric value on the wire for state to set.
 * @return This builder for chaining.
 */
public Builder setStateValue(int value) {
  // Accepts any int, including numbers with no matching State constant.
  state_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * The state of the review operation.
 *
 * <code>.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State state = 2;</code>
 *
 * @return The state, or {@code UNRECOGNIZED} if the stored number has no
 *     matching enum constant.
 */
@java.lang.Override
public com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State getState() {
  com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State result =
      com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State.forNumber(state_);
  return result == null
      ? com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State.UNRECOGNIZED
      : result;
}
/**
 * The state of the review operation.
 *
 * <code>.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State state = 2;</code>
 *
 * @param value The state to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setState(
    com.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
  state_ = value.getNumber();
  onChanged();
  return this;
}
/**
 * The state of the review operation.
 *
 * <code>.google.cloud.documentai.v1beta3.ReviewDocumentResponse.State state = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearState() {
  // Reset to the default enum number (0) and clear the presence bit.
  bitField0_ = (bitField0_ & ~0x00000002);
  state_ = 0;
  onChanged();
  return this;
}
// Same lazy String/ByteString dual representation as gcsDestination_.
private java.lang.Object rejectionReason_ = "";
/**
 * The reason why the review is rejected by reviewer.
 *
 * <code>string rejection_reason = 3;</code>
 *
 * @return The rejectionReason.
 */
public java.lang.String getRejectionReason() {
  java.lang.Object ref = rejectionReason_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String form.
    rejectionReason_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * The reason why the review is rejected by reviewer.
 *
 * <code>string rejection_reason = 3;</code>
 *
 * @return The bytes for rejectionReason.
 */
public com.google.protobuf.ByteString getRejectionReasonBytes() {
  java.lang.Object ref = rejectionReason_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString form.
    rejectionReason_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * The reason why the review is rejected by reviewer.
 *
 * <code>string rejection_reason = 3;</code>
 *
 * @param value The rejectionReason to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setRejectionReason(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  rejectionReason_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * The reason why the review is rejected by reviewer.
 *
 * <code>string rejection_reason = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearRejectionReason() {
  // Reset to the field's default value and clear its presence bit.
  rejectionReason_ = getDefaultInstance().getRejectionReason();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
 * The reason why the review is rejected by reviewer.
 *
 * <code>string rejection_reason = 3;</code>
 *
 * @param value The bytes for rejectionReason to set.
 * @return This builder for chaining.
 * @throws NullPointerException if {@code value} is null.
 */
public Builder setRejectionReasonBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Proto3 strings must be valid UTF-8; reject malformed bytes up front.
  checkByteStringIsUtf8(value);
  rejectionReason_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Unknown-field handling: delegations to the superclass, narrowed to this
// Builder's return type.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.ReviewDocumentResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.ReviewDocumentResponse)
// Singleton default instance: all fields at their proto3 default values.
// Created eagerly in a static initializer.
private static final com.google.cloud.documentai.v1beta3.ReviewDocumentResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.ReviewDocumentResponse();
}

public static com.google.cloud.documentai.v1beta3.ReviewDocumentResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser instance. parsePartialFrom builds through a fresh Builder
// and, on failure, attaches the partially-populated message to the thrown
// InvalidProtocolBufferException so callers can inspect what was parsed.
private static final com.google.protobuf.Parser<ReviewDocumentResponse> PARSER =
    new com.google.protobuf.AbstractParser<ReviewDocumentResponse>() {
      @java.lang.Override
      public ReviewDocumentResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ReviewDocumentResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ReviewDocumentResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.documentai.v1beta3.ReviewDocumentResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,759 | java-geminidataanalytics/proto-google-cloud-geminidataanalytics-v1beta/src/main/java/com/google/cloud/geminidataanalytics/v1beta/BigQueryTableReferences.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/geminidataanalytics/v1beta/datasource.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.geminidataanalytics.v1beta;
/**
*
*
* <pre>
* Message representing references to BigQuery tables.
* </pre>
*
* Protobuf type {@code google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences}
*/
public final class BigQueryTableReferences extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences)
BigQueryTableReferencesOrBuilder {
private static final long serialVersionUID = 0L;
// Use BigQueryTableReferences.newBuilder() to construct.
private BigQueryTableReferences(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance; the repeated field
// starts as an immutable empty list.
private BigQueryTableReferences() {
  tableReferences_ = java.util.Collections.emptyList();
}

// Factory hook used by the protobuf runtime to create instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new BigQueryTableReferences();
}

// Descriptor for this message type, looked up from the generated file proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.geminidataanalytics.v1beta.DatasourceProto
      .internal_static_google_cloud_geminidataanalytics_v1beta_BigQueryTableReferences_descriptor;
}

// Wires the descriptor to the generated message and builder classes for
// reflective field access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.geminidataanalytics.v1beta.DatasourceProto
      .internal_static_google_cloud_geminidataanalytics_v1beta_BigQueryTableReferences_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences.class,
          com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences.Builder.class);
}
public static final int TABLE_REFERENCES_FIELD_NUMBER = 1;

// Immutable once the message is built; exposed directly by the getters.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference>
    tableReferences_;
/**
 * Required. References to BigQuery tables.
 *
 * <code>repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference>
    getTableReferencesList() {
  return tableReferences_;
}
/**
 * Required. References to BigQuery tables.
 *
 * <code>repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public java.util.List<
        ? extends com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferenceOrBuilder>
    getTableReferencesOrBuilderList() {
  return tableReferences_;
}
/**
 * Required. References to BigQuery tables.
 *
 * <code>repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public int getTableReferencesCount() {
  return tableReferences_.size();
}
/**
 * Required. References to BigQuery tables.
 *
 * <code>repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference getTableReferences(
    int index) {
  return tableReferences_.get(index);
}
/**
 * Required. References to BigQuery tables.
 *
 * <code>repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];</code>
 */
@java.lang.Override
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferenceOrBuilder
    getTableReferencesOrBuilder(int index) {
  return tableReferences_.get(index);
}
// Memoized initialization check: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // Proto3, no required wire-level fields: always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes the repeated message field (field 1) then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < tableReferences_.size(); i++) {
    output.writeMessage(1, tableReferences_.get(i));
  }
  getUnknownFields().writeTo(output);
}

// Computes and memoizes the serialized byte size (memoizedSize == -1 means
// not yet computed; the field lives in the superclass).
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < tableReferences_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tableReferences_.get(i));
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over the table_references list and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences)) {
    return super.equals(obj);
  }
  com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences other =
      (com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences) obj;

  if (!getTableReferencesList().equals(other.getTableReferencesList())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash consistent with equals(); memoized since the message is immutable
// (memoizedHashCode lives in the superclass, 0 means not yet computed).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getTableReferencesCount() > 0) {
    hash = (37 * hash) + TABLE_REFERENCES_FIELD_NUMBER;
    hash = (53 * hash) + getTableReferencesList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per supported input
// source (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream),
// each with and without an extension registry. All delegate to PARSER.
public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message body.
public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
    parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
    parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Returns a builder pre-populated with the given prototype's fields.
public static Builder newBuilder(
    com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // The default instance yields an empty builder without a merge pass.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Message representing references to BigQuery tables.
* </pre>
*
* Protobuf type {@code google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences)
com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferencesOrBuilder {
// Descriptor for the message type this builder produces.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.geminidataanalytics.v1beta.DatasourceProto
      .internal_static_google_cloud_geminidataanalytics_v1beta_BigQueryTableReferences_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.geminidataanalytics.v1beta.DatasourceProto
      .internal_static_google_cloud_geminidataanalytics_v1beta_BigQueryTableReferences_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences.class,
          com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences.Builder.class);
}

// Construct using
// com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

// Resets the builder to the default state, handling both the plain-list
// and nested-builder representations of the repeated field.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (tableReferencesBuilder_ == null) {
    tableReferences_ = java.util.Collections.emptyList();
  } else {
    tableReferences_ = null;
    tableReferencesBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.geminidataanalytics.v1beta.DatasourceProto
      .internal_static_google_cloud_geminidataanalytics_v1beta_BigQueryTableReferences_descriptor;
}

@java.lang.Override
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
    getDefaultInstanceForType() {
  return com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
      .getDefaultInstance();
}
// Builds the message, failing if it is not initialized (cannot happen for
// this proto3 type, but kept for the generic Message.Builder contract).
@java.lang.Override
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences build() {
  com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences buildPartial() {
  com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences result =
      new com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Moves the repeated field into the message: freezes the builder's list in
// place (making it unmodifiable) or builds from the nested field builder.
private void buildPartialRepeatedFields(
    com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences result) {
  if (tableReferencesBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      tableReferences_ = java.util.Collections.unmodifiableList(tableReferences_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.tableReferences_ = tableReferences_;
  } else {
    result.tableReferences_ = tableReferencesBuilder_.build();
  }
}

// No singular fields on this message; generated as an empty shell (the
// local is an artifact of the code generator's template).
private void buildPartial0(
    com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences result) {
  int from_bitField0_ = bitField0_;
}
// Reflection-based mutators: delegations narrowed to this Builder type.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

// Type-dispatching merge: fast path for the generated type, reflective
// fallback otherwise.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences) {
    return mergeFrom(
        (com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Appends other's table_references to ours. Handles both repeated-field
// representations: plain list vs. nested RepeatedFieldBuilderV3. When we
// hold an empty builder, it adopts other's list directly (by reference,
// presence bit cleared) to avoid a copy.
public Builder mergeFrom(
    com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences other) {
  if (other
      == com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
          .getDefaultInstance()) return this;
  if (tableReferencesBuilder_ == null) {
    if (!other.tableReferences_.isEmpty()) {
      if (tableReferences_.isEmpty()) {
        tableReferences_ = other.tableReferences_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureTableReferencesIsMutable();
        tableReferences_.addAll(other.tableReferences_);
      }
      onChanged();
    }
  } else {
    if (!other.tableReferences_.isEmpty()) {
      if (tableReferencesBuilder_.isEmpty()) {
        tableReferencesBuilder_.dispose();
        tableReferencesBuilder_ = null;
        tableReferences_ = other.tableReferences_;
        bitField0_ = (bitField0_ & ~0x00000001);
        // Re-create the field builder eagerly only when the runtime always
        // uses field builders (alwaysUseFieldBuilders).
        tableReferencesBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getTableReferencesFieldBuilder()
                : null;
      } else {
        tableReferencesBuilder_.addAllMessages(other.tableReferences_);
      }
    }
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

// Proto3: no required fields, always initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Parses fields from the wire format. Tag 10 is field 1 (table_references,
// length-delimited message); each parsed element is appended to whichever
// repeated-field representation (list or builder) is active. Unknown tags
// are preserved.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        case 10:
          {
            com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference m =
                input.readMessage(
                    com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.parser(),
                    extensionRegistry);
            if (tableReferencesBuilder_ == null) {
              ensureTableReferencesIsMutable();
              tableReferences_.add(m);
            } else {
              tableReferencesBuilder_.addMessage(m);
            }
            break;
          } // case 10
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify listeners even on partial parses.
    onChanged();
  } // finally
  return this;
}
// Bit 0x1 tracks whether tableReferences_ is a private mutable copy.
private int bitField0_;

// Active only while tableReferencesBuilder_ is null; starts as the shared
// immutable empty list.
private java.util.List<com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference>
    tableReferences_ = java.util.Collections.emptyList();

// Copy-on-write: replaces the (possibly shared/immutable) list with a
// private ArrayList copy the first time a mutation is needed.
private void ensureTableReferencesIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    tableReferences_ =
        new java.util.ArrayList<
            com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference>(
            tableReferences_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created alternative representation of the repeated field; once
// non-null, all mutations route through it instead of tableReferences_.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference,
        com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder,
        com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferenceOrBuilder>
    tableReferencesBuilder_;
/**
*
*
* <pre>
* Required. References to BigQuery tables.
* </pre>
*
* <code>
* repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public java.util.List<com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference>
getTableReferencesList() {
if (tableReferencesBuilder_ == null) {
return java.util.Collections.unmodifiableList(tableReferences_);
} else {
return tableReferencesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Required. References to BigQuery tables.
* </pre>
*
* <code>
* repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public int getTableReferencesCount() {
if (tableReferencesBuilder_ == null) {
return tableReferences_.size();
} else {
return tableReferencesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Required. References to BigQuery tables.
* </pre>
*
* <code>
* repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference getTableReferences(
int index) {
if (tableReferencesBuilder_ == null) {
return tableReferences_.get(index);
} else {
return tableReferencesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Required. References to BigQuery tables.
* </pre>
*
* <code>
* repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTableReferences(
int index, com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference value) {
if (tableReferencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableReferencesIsMutable();
tableReferences_.set(index, value);
onChanged();
} else {
tableReferencesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Required. References to BigQuery tables.
* </pre>
*
* <code>
* repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setTableReferences(
int index,
com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder
builderForValue) {
if (tableReferencesBuilder_ == null) {
ensureTableReferencesIsMutable();
tableReferences_.set(index, builderForValue.build());
onChanged();
} else {
tableReferencesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Required. References to BigQuery tables.
* </pre>
*
* <code>
* repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder addTableReferences(
com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference value) {
if (tableReferencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTableReferencesIsMutable();
tableReferences_.add(value);
onChanged();
} else {
tableReferencesBuilder_.addMessage(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder addTableReferences(
    int index, com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference value) {
  // Inserts at the given index (shifting later elements); nulls are rejected.
  if (tableReferencesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureTableReferencesIsMutable();
    tableReferences_.add(index, value);
    onChanged();
  } else {
    tableReferencesBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder addTableReferences(
    com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder
        builderForValue) {
  // Builder overload: the message is built eagerly on the local-list path.
  if (tableReferencesBuilder_ == null) {
    ensureTableReferencesIsMutable();
    tableReferences_.add(builderForValue.build());
    onChanged();
  } else {
    tableReferencesBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder addTableReferences(
    int index,
    com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder
        builderForValue) {
  // Indexed builder overload; inserts the built message at {@code index}.
  if (tableReferencesBuilder_ == null) {
    ensureTableReferencesIsMutable();
    tableReferences_.add(index, builderForValue.build());
    onChanged();
  } else {
    tableReferencesBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder addAllTableReferences(
    java.lang.Iterable<
            ? extends com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference>
        values) {
  // Bulk append; AbstractMessageLite.Builder.addAll performs per-element null checks.
  if (tableReferencesBuilder_ == null) {
    ensureTableReferencesIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tableReferences_);
    onChanged();
  } else {
    tableReferencesBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * Removes every element of the repeated field.
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearTableReferences() {
  if (tableReferencesBuilder_ != null) {
    // The nested builder owns the list; let it reset its own state.
    tableReferencesBuilder_.clear();
    return this;
  }
  // Drop the local list and clear the has-bit for field number 1.
  tableReferences_ = java.util.Collections.emptyList();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder removeTableReferences(int index) {
  // Removes the element at {@code index}; later elements shift left.
  if (tableReferencesBuilder_ == null) {
    ensureTableReferencesIsMutable();
    tableReferences_.remove(index);
    onChanged();
  } else {
    tableReferencesBuilder_.remove(index);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder
    getTableReferencesBuilder(int index) {
  // Forces creation of the nested field builder so the element can be mutated in place.
  return getTableReferencesFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferenceOrBuilder
    getTableReferencesOrBuilder(int index) {
  // Read-only access: does NOT force creation of the nested field builder.
  if (tableReferencesBuilder_ == null) {
    return tableReferences_.get(index);
  } else {
    return tableReferencesBuilder_.getMessageOrBuilder(index);
  }
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public java.util.List<
        ? extends com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferenceOrBuilder>
    getTableReferencesOrBuilderList() {
  // Returns a read-only view; wraps the local list when no field builder exists.
  if (tableReferencesBuilder_ != null) {
    return tableReferencesBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(tableReferences_);
  }
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder
    addTableReferencesBuilder() {
  // Appends a default-instance-backed builder and returns it for in-place mutation.
  return getTableReferencesFieldBuilder()
      .addBuilder(
          com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference
              .getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder
    addTableReferencesBuilder(int index) {
  // Inserts a default-instance-backed builder at {@code index} and returns it.
  return getTableReferencesFieldBuilder()
      .addBuilder(
          index,
          com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference
              .getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * Required. References to BigQuery tables.
 * </pre>
 *
 * <code>
 * repeated .google.cloud.geminidataanalytics.v1beta.BigQueryTableReference table_references = 1 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public java.util.List<
        com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder>
    getTableReferencesBuilderList() {
  // Forces creation of the nested field builder; every element becomes mutable.
  return getTableReferencesFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for table_references. After creation the
// nested builder becomes the single source of truth and the local list is nulled out,
// which is why every accessor above branches on tableReferencesBuilder_ == null.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference,
        com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder,
        com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferenceOrBuilder>
    getTableReferencesFieldBuilder() {
  if (tableReferencesBuilder_ == null) {
    tableReferencesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference,
            com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReference.Builder,
            com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferenceOrBuilder>(
            tableReferences_,
            // has-bit for field 1 tells the builder whether the list was already mutable.
            ((bitField0_ & 0x00000001) != 0),
            getParentForChildren(),
            isClean());
    // Ownership transferred to the field builder; drop the local reference.
    tableReferences_ = null;
  }
  return tableReferencesBuilder_;
}
// Standard generated passthrough; kept final so subclasses cannot alter unknown-field handling.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Standard generated passthrough for merging unknown fields from another message.
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences)
}
// @@protoc_insertion_point(class_scope:google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences)
// Singleton default instance, initialized eagerly in a static block (class-load time).
private static final com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences();
}
// Static accessor for the shared immutable default instance of this message type.
public static com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared stateless parser. parsePartialFrom funnels every decoding failure into
// InvalidProtocolBufferException, attaching the partially-built message so callers
// can inspect whatever fields did parse.
private static final com.google.protobuf.Parser<BigQueryTableReferences> PARSER =
    new com.google.protobuf.AbstractParser<BigQueryTableReferences>() {
      @java.lang.Override
      public BigQueryTableReferences parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Re-throw with the partial message attached.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Missing required fields: convert, still attaching the partial message.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap raw stream errors in the protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the shared parser instance.
public static com.google.protobuf.Parser<BigQueryTableReferences> parser() {
  return PARSER;
}
// Instance-level accessor required by the Message interface; same shared parser.
@java.lang.Override
public com.google.protobuf.Parser<BigQueryTableReferences> getParserForType() {
  return PARSER;
}
// Instance-level accessor for the default instance, required by the Message interface.
@java.lang.Override
public com.google.cloud.geminidataanalytics.v1beta.BigQueryTableReferences
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/graal | 35,828 | truffle/src/com.oracle.truffle.api.instrumentation.test/src/com/oracle/truffle/api/instrumentation/test/TruffleContextTest.java | /*
* Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.truffle.api.instrumentation.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayOutputStream;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;
import org.graalvm.options.OptionValues;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Engine;
import org.graalvm.polyglot.PolyglotException;
import org.graalvm.polyglot.Source;
import org.junit.After;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import com.oracle.truffle.api.CallTarget;
import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.TruffleContext;
import com.oracle.truffle.api.TruffleLanguage;
import com.oracle.truffle.api.TruffleSafepoint;
import com.oracle.truffle.api.exception.AbstractTruffleException;
import com.oracle.truffle.api.frame.VirtualFrame;
import com.oracle.truffle.api.instrumentation.EventContext;
import com.oracle.truffle.api.instrumentation.ExecutionEventListener;
import com.oracle.truffle.api.instrumentation.SourceSectionFilter;
import com.oracle.truffle.api.instrumentation.StandardTags;
import com.oracle.truffle.api.instrumentation.ThreadsActivationListener;
import com.oracle.truffle.api.interop.ArityException;
import com.oracle.truffle.api.interop.InteropException;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.TruffleObject;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.interop.UnsupportedTypeException;
import com.oracle.truffle.api.library.ExportLibrary;
import com.oracle.truffle.api.library.ExportMessage;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.test.polyglot.AbstractPolyglotTest;
import com.oracle.truffle.api.test.polyglot.LanguageSPIOrderTest;
import com.oracle.truffle.api.test.polyglot.ProxyLanguage;
import com.oracle.truffle.api.test.polyglot.ProxyLanguage.LanguageContext;
public class TruffleContextTest extends AbstractPolyglotTest {
// Exposes the current test method name for diagnostics (see checkInterrupted()).
@Rule public TestName testNameRule = new TestName();

public TruffleContextTest() {
    // Request both language and instrument environments from AbstractPolyglotTest,
    // and suppress close-time cancel/exit errors since several tests force-close contexts.
    needsLanguageEnv = true;
    needsInstrumentEnv = true;
    ignoreCancelOnClose = true;
    ignoreExitOnClose = true;
}
// Guard: no test may leave the current thread's interrupt flag set.
// Note Thread.interrupted() also clears the flag, so later tests are unaffected.
@After
public void checkInterrupted() {
    Assert.assertFalse("Interrupted flag was left set by test: " + testNameRule.getMethodName(), Thread.interrupted());
}
// Basic lifecycle of an inner context: freshly created -> not entered/closed,
// enter/leave toggle isEntered(), and the parent is the creating context.
@Test
public void testCreate() {
    setupEnv();

    TruffleContext tc = languageEnv.newInnerContextBuilder().build();
    assertNotEquals(tc, languageEnv.getContext());
    assertFalse(tc.isEntered());
    assertFalse(tc.isClosed());
    assertFalse(tc.isCancelling());
    assertFalse(tc.isExiting());
    assertNotNull(tc.toString());
    assertEquals(tc.getParent(), languageEnv.getContext());

    // enter returns the previous context handle that must be passed back to leave.
    Object prev = tc.enter(null);
    assertTrue(tc.isEntered());
    assertFalse(tc.isClosed());
    tc.leave(null, prev);

    assertFalse(tc.isEntered());
    assertFalse(tc.isClosed());
    tc.close();
}
// closeCancelled on an idle (non-entered) inner context closes it immediately;
// the cancelling flag never becomes observable afterwards.
@Test
public void testSimpleForceClose() {
    setupEnv();

    TruffleContext tc = languageEnv.newInnerContextBuilder().build();
    assertFalse(tc.isClosed());
    assertFalse(tc.isCancelling());
    tc.closeCancelled(null, "testreason");
    assertTrue(tc.isClosed());
    assertFalse(tc.isCancelling());
}
// Starts 100 threads spinning in an inner context, then force-cancels the context and
// verifies every thread is stopped with the cancel-execution error carrying the reason.
// finalizeContext joins the threads so the close cannot complete before they exit.
@Test
public void testParallelForceClose() throws InterruptedException {
    List<Thread> threads = new ArrayList<>();
    setupEnv(Context.newBuilder().allowAllAccess(true).option("engine.TriggerUncaughtExceptionHandlerForCancel", "true").build(),
                    new ProxyLanguage() {
                        @Override
                        protected boolean isThreadAccessAllowed(Thread thread, boolean singleThreaded) {
                            return true;
                        }

                        @Override
                        protected void finalizeContext(LanguageContext langContext) {
                            // Wait for all worker threads before the context finishes closing.
                            for (int i = 0; i < threads.size(); i++) {
                                try {
                                    threads.get(i).join();
                                } catch (InterruptedException e) {
                                    throw new AssertionError(e);
                                }
                            }
                        }
                    });

    TruffleContext tc = languageEnv.newInnerContextBuilder().inheritAllAccess(true).initializeCreatorContext(true).build();
    List<AtomicReference<Throwable>> exceptions = new ArrayList<>();
    Semaphore waitUntilStart = new Semaphore(0);
    for (int i = 0; i < 100; i++) {
        Thread t = languageEnv.newTruffleThreadBuilder(() -> {
            com.oracle.truffle.api.source.Source s = com.oracle.truffle.api.source.Source.newBuilder(InstrumentationTestLanguage.ID, "EXPRESSION", "").build();
            CallTarget target = LanguageContext.get(null).getEnv().parsePublic(s);
            // Loop forever; only the cancel safepoint can terminate this thread.
            while (true) {
                target.call();

                // at least one thread should have started execution
                waitUntilStart.release();
            }
        }).context(tc).build();
        AtomicReference<Throwable> exception = new AtomicReference<>();
        t.setUncaughtExceptionHandler((thread, e) -> {
            exception.set(e);
        });
        exceptions.add(exception);
        t.start();
        threads.add(t);
    }
    // 10s ought to be enough for anybody
    if (!waitUntilStart.tryAcquire(10000, TimeUnit.MILLISECONDS)) {
        // Surface any worker failure before reporting a generic timeout.
        for (AtomicReference<Throwable> e : exceptions) {
            if (e.get() != null) {
                throw new AssertionError(e.get());
            }
        }
        throw new AssertionError("failed to wait for execution");
    }

    assertFalse(tc.isClosed());

    // No worker may have failed before the cancel.
    for (int i = 0; i < threads.size(); i++) {
        assertNull(exceptions.get(i).get());
    }

    tc.closeCancelled(null, "testreason");

    // Every worker must have died with the cancel error carrying the reason.
    for (int i = 0; i < threads.size(); i++) {
        Throwable e = exceptions.get(i).get();
        assertNotNull(e);
        assertEquals(getCancelExecutionClass(), e.getClass());
        assertEquals("testreason", e.getMessage());
        assertTrue(tc.isClosed());
    }
}
// Closing a context that is currently entered: close() fails with IllegalStateException,
// while closeCancelled/closeResourceExhausted throw the cancel error at the given node.
@Test
public void testCloseInEntered() {
    setupEnv();

    TruffleContext tc = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();

    Node node = new Node() {
    };

    Object prev = tc.enter(null);

    assertFails(() -> tc.close(), IllegalStateException.class);

    assertFails(() -> tc.closeCancelled(node, "testreason"), getCancelExecutionClass(), (e) -> {
        // The cancel error reports the requesting node and reason.
        assertSame(getCancelExecutionLocation(e), node);
        assertEquals("testreason", e.getMessage());
    });

    assertFails(() -> tc.closeResourceExhausted(node, "testreason"), getCancelExecutionClass(), (e) -> {
        assertSame(getCancelExecutionLocation(e), node);
        assertEquals("testreason", e.getMessage());
    });
    tc.leave(null, prev);
}
// closeResourceExhausted on a context running in another thread surfaces to the host
// as a PolyglotException flagged both cancelled and resource-exhausted.
@Test
public void testCancelledAndResourceExhausted() throws InterruptedException {
    setupEnv();

    AtomicReference<Throwable> error = new AtomicReference<>();
    Thread t = new Thread(() -> {
        try {
            context.eval(InstrumentationTestLanguage.ID, "LOOP(infinity, STATEMENT)");
        } catch (Throwable e) {
            error.set(e);
        }
    });
    context.leave(); // avoid need for multi-threading

    // Capture the TruffleContext from the worker thread via the activation listener.
    AtomicReference<TruffleContext> enter = new AtomicReference<>();
    Semaphore waitUntilEntered = new Semaphore(0);
    instrumentEnv.getInstrumenter().attachThreadsActivationListener(new ThreadsActivationListener() {
        @TruffleBoundary
        public void onEnterThread(TruffleContext tc) {
            enter.set(tc);
            waitUntilEntered.release();
        }

        public void onLeaveThread(TruffleContext tc) {
        }
    });
    t.start();

    if (!waitUntilEntered.tryAcquire(10000, TimeUnit.MILLISECONDS)) {
        throw new AssertionError(error.get());
    }

    TruffleContext tc = enter.get();
    tc.closeResourceExhausted(null, "testError");
    t.join();

    assertNotNull(error.get());
    assertTrue(error.get().toString(), error.get() instanceof PolyglotException);
    PolyglotException e = (PolyglotException) error.get();
    assertEquals("testError", e.getMessage());
    // Resource exhaustion implies cancellation in the polyglot API.
    assertTrue(e.isCancelled());
    assertTrue(e.isResourceExhausted());
}
// Verifies isCancelling(): true while a resource-exhausted close is being processed
// inside the guest thread, false once the close has completed.
@Test
public void testCancelling() throws ExecutionException, InterruptedException {
    setupEnv();
    ExecutorService executorService = Executors.newFixedThreadPool(1);
    try {
        context.leave(); // avoid need for multi-threading

        // Record the entered context once the infinite loop reaches its first statement.
        AtomicReference<TruffleContext> entered = new AtomicReference<>();
        CountDownLatch waitUntilStatementExecuted = new CountDownLatch(1);
        instrumentEnv.getInstrumenter().attachExecutionEventListener(SourceSectionFilter.newBuilder().tagIs(StandardTags.StatementTag.class).build(), new ExecutionEventListener() {
            @TruffleBoundary
            @Override
            public void onEnter(EventContext ctx, VirtualFrame frame) {
                entered.set(instrumentEnv.getEnteredContext());
                waitUntilStatementExecuted.countDown();
            }

            @Override
            public void onReturnValue(EventContext ctx, VirtualFrame frame, Object result) {

            }

            @Override
            public void onReturnExceptional(EventContext ctx, VirtualFrame frame, Throwable exception) {

            }
        });
        Future<?> future = executorService.submit(() -> {
            context.enter();
            try {
                context.eval(InstrumentationTestLanguage.ID, "LOOP(infinity, STATEMENT)");
                fail();
            } catch (PolyglotException pe) {
                if (!pe.isCancelled() || !pe.isResourceExhausted()) {
                    throw pe;
                }
                assertEquals("testError", pe.getMessage());
                // While the PolyglotException propagates, the context is still cancelling.
                assertTrue(entered.get().isCancelling());
            } finally {
                context.leave();
            }
        });
        waitUntilStatementExecuted.await();
        TruffleContext tc = entered.get();
        assertFalse(tc.isCancelling());
        tc.closeResourceExhausted(null, "testError");
        future.get();
        // After the close completes the cancelling phase is over.
        assertFalse(tc.isCancelling());
        assertTrue(tc.isClosed());
    } finally {
        executorService.shutdownNow();
        executorService.awaitTermination(100, TimeUnit.SECONDS);
    }
}
// Verifies isExiting(): true while a guest EXIT(1) propagates inside the worker thread,
// false once the context has fully closed.
@Test
public void testExiting() throws ExecutionException, InterruptedException {
    setupEnv(Context.newBuilder(), new ProxyLanguage() {
        @Override
        protected boolean isThreadAccessAllowed(Thread thread, boolean singleThreaded) {
            return true;
        }
    });
    ExecutorService executorService = Executors.newFixedThreadPool(1);
    try {
        AtomicReference<TruffleContext> entered = new AtomicReference<>();
        CountDownLatch waitUntilExited = new CountDownLatch(1);
        instrumentEnv.getInstrumenter().attachExecutionEventListener(SourceSectionFilter.newBuilder().tagIs(StandardTags.StatementTag.class).build(), new ExecutionEventListener() {
            @TruffleBoundary
            @Override
            public void onEnter(EventContext ctx, VirtualFrame frame) {
                entered.set(instrumentEnv.getEnteredContext());
            }

            @Override
            public void onReturnValue(EventContext ctx, VirtualFrame frame, Object result) {

            }

            @Override
            public void onReturnExceptional(EventContext ctx, VirtualFrame frame, Throwable exception) {

            }
        });
        Future<?> future = executorService.submit(() -> {
            context.enter();
            try {
                context.eval(InstrumentationTestLanguage.ID, "ROOT(STATEMENT,EXIT(1))");
                fail();
            } catch (PolyglotException pe) {
                if (!pe.isExit()) {
                    throw pe;
                }
                assertEquals(1, pe.getExitStatus());
                // While the exit exception propagates, the context reports isExiting().
                assertTrue(entered.get().isExiting());
            } finally {
                context.leave();
            }
            waitUntilExited.countDown();
        });
        try {
            TruffleSafepoint.setBlockedThreadInterruptible(null, CountDownLatch::await, waitUntilExited);
        } catch (ThreadDeath e) {
            // The exit may also reach this (entered) thread as a ThreadDeath safepoint.
            if (!"Exit was called with exit code 1.".equals(e.getMessage())) {
                throw e;
            }
        }
        /*
         * Multi-threading is necessary, otherwise the context is closed while entered and we
         * cannot check isExiting().
         */
        context.leave();
        TruffleContext tc = entered.get();
        tc.close();
        future.get();
        assertFalse(tc.isExiting());
        assertTrue(tc.isClosed());
    } finally {
        executorService.shutdownNow();
        executorService.awaitTermination(100, TimeUnit.SECONDS);
    }
}
// A cancel triggered from guest code must not be printed by the default uncaught
// exception handler; the captured stderr must not mention the cancel error class.
// The controller/worker scripts are dispatched by the language's parse() below.
@Test
public void testCancellingUncaughtExceptionHandler() {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    setupEnv(Context.newBuilder().allowAllAccess(true).err(out).build(), new ProxyLanguage() {
        @Override
        protected CallTarget parse(ParsingRequest request) {
            RootNode rootNode;
            String command = request.getSource().getCharacters().toString();
            switch (command) {
                case "controller":
                    rootNode = new ControllerNode(languageInstance);
                    break;
                case "worker":
                    rootNode = new WorkerNode(languageInstance);
                    break;
                default:
                    throw CompilerDirectives.shouldNotReachHere("Unknown request: " + command);
            }
            return rootNode.getCallTarget();
        }
    });
    context.eval(Source.newBuilder(ProxyLanguage.ID, "controller", "test").buildLiteral());
    assertFalse(out.toString().contains(getCancelExecutionClass().getName()));
}
// Nested enter/leave across two sibling inner contexts: isActive() tracks membership
// on the thread's context stack, isEntered() only the top. A context that is active
// but not on top cannot be closed normally, but may be cancelled.
@Test
public void testContextHierarchy() {
    setupEnv();

    TruffleContext tc1 = languageEnv.newInnerContextBuilder().build();
    TruffleContext tc2 = languageEnv.newInnerContextBuilder().build();

    assertFalse(tc1.isActive());
    assertFalse(tc1.isEntered());
    assertFalse(tc2.isActive());
    assertFalse(tc2.isEntered());

    Object prev1 = tc1.enter(null);
    assertTrue(tc1.isActive());
    assertTrue(tc1.isEntered());
    assertFalse(tc2.isActive());
    assertFalse(tc2.isEntered());

    Object prev2 = tc2.enter(null);
    // tc1 is still active (on the stack) but no longer the entered (top) context.
    assertTrue(tc1.isActive());
    assertFalse(tc1.isEntered());
    assertTrue(tc2.isActive());
    assertTrue(tc2.isEntered());
    assertFails(() -> tc1.close(), IllegalStateException.class);
    assertFails(() -> tc1.closeCancelled(null, ""), IllegalStateException.class);
    assertFails(() -> tc1.closeResourceExhausted(null, ""), IllegalStateException.class);

    tc2.leave(null, prev2);

    assertTrue(tc1.isActive());
    assertTrue(tc1.isEntered());
    assertFalse(tc2.isActive());
    assertFalse(tc2.isEntered());

    tc1.leave(null, prev1);
    assertFalse(tc1.isActive());
    assertFalse(tc1.isEntered());
    assertFalse(tc2.isActive());
    assertFalse(tc2.isEntered());

    // Re-enter tc1 twice with tc2 in between: tc1 is now both entered and active deeper down.
    prev1 = tc1.enter(null);
    prev2 = tc2.enter(null);
    Object prev3 = tc1.enter(null);

    assertFails(() -> tc1.close(), IllegalStateException.class);

    // we allow cancel in this case. the error will be propagated and the caller
    // needs to make sure to propagate the cancel to the parent context
    assertFails(() -> tc1.closeCancelled(null, ""), getCancelExecutionClass());
    assertFails(() -> tc1.closeResourceExhausted(null, ""), getCancelExecutionClass());

    tc1.leave(null, prev3);
    tc2.leave(null, prev2);
    tc1.leave(null, prev1);

    tc2.close();
    tc1.close();
}
// leaveAndEnter temporarily leaves the current context, runs the supplied function
// un-entered, then restores the entered state and returns the function's result.
@Test
public void testLeaveAndEnter() {
    setupEnv();

    TruffleContext tc = languageEnv.getContext();
    assertTrue(tc.isEntered());
    int value = tc.leaveAndEnter(null, TruffleSafepoint.Interrupter.THREAD_INTERRUPT, (x) -> {
        // Inside the callback the context must be left but still open.
        assertFalse(tc.isEntered());
        assertFalse(tc.isClosed());
        return 42;
    }, null);
    assertEquals(42, value);

    assertTrue(tc.isEntered());
    assertFalse(tc.isClosed());
}
// initializeCreatorContext(false) leaves the creator language uninitialized in the
// inner context (lookup yields null); initializeCreatorContext(true) initializes it.
@Test
public void testInitializeCreatorContext() {
    setupEnv();
    TruffleContext innerContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(false).build();
    Object prev = innerContext.enter(null);
    try {
        assertNull(ProxyLanguage.LanguageContext.get(null));
    } finally {
        innerContext.leave(null, prev);
    }
    innerContext.close();

    innerContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();
    prev = innerContext.enter(null);
    try {
        assertNotNull(ProxyLanguage.LanguageContext.get(null));
    } finally {
        innerContext.leave(null, prev);
    }
    innerContext.close();
}
// Error cases of TruffleContext.evalInternal: rejected on a non-inner context,
// rejected while the inner context itself is entered, and rejected for unknown languages.
@Test
public void testEvalInnerContextEvalErrors() {
    setupEnv();
    // regular context must not be used
    TruffleContext currentContext = languageEnv.getContext();
    assertFails(() -> currentContext.evalInternal(null, newTruffleSource()), IllegalStateException.class, (e) -> {
        assertEquals("Only created inner contexts can be used to evaluate sources. " +
                        "Use TruffleLanguage.Env.parseInternal(Source) or TruffleInstrument.Env.parse(Source) instead.", e.getMessage());
    });

    TruffleContext innerContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();

    // inner context must not be entered for eval
    Object prev = innerContext.enter(null);
    assertFails(() -> innerContext.evalInternal(null, newTruffleSource()), IllegalStateException.class, (e) -> {
        assertEquals("Invalid parent context entered. " +
                        "The parent creator context or no context must be entered to evaluate code in an inner context.", e.getMessage());
    });
    innerContext.leave(null, prev);

    // unknown language id fails with IllegalArgumentException listing available languages
    assertFails(() -> innerContext.evalInternal(null, com.oracle.truffle.api.source.Source.newBuilder("foobarbazz$_", "", "").build()), IllegalArgumentException.class, (e) -> {
        assertTrue(e.getMessage(), e.getMessage().startsWith("A language with id 'foobarbazz$_' is not available. Available languages are:"));
    });
    innerContext.close();
}
// Guest exceptions thrown by evalInternal cross the inner/outer context boundary wrapped:
// interop messages sent to the wrapper execute with the INNER context entered, while
// argument values from the outer context execute with the OUTER context entered.
@Test
public void testEvalInnerContextError() throws InteropException {
    EvalContextTestException innerException = new EvalContextTestException();
    EvalContextTestObject outerObject = new EvalContextTestObject();

    setupLanguageThatReturns(() -> {
        throw innerException;
    });
    TruffleContext innerContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();
    innerException.expectedContext = innerContext;
    outerObject.expectedContext = languageEnv.getContext();

    try {
        innerContext.evalInternal(null, newTruffleSource());
        fail();
    } catch (AbstractTruffleException e) {
        // arguments of the parent context are entered in the outer context
        Object result = InteropLibrary.getUncached().execute(e, outerObject);
        // and return values are entered again in the inner context
        result = InteropLibrary.getUncached().execute(result, outerObject);

        // throwing through the wrapper re-raises the inner exception (re-wrapped).
        try {
            InteropLibrary.getUncached().throwException(result);
            fail();
        } catch (AbstractTruffleException innerEx) {
            result = InteropLibrary.getUncached().execute(innerEx, outerObject);
        }
    }
    // Each side observed exactly three execute() round-trips.
    assertEquals(3, innerException.executeCount);
    assertEquals(3, outerObject.executeCount);
    innerContext.close();
}
// Executable guest exception used to assert WHICH context is entered whenever an
// interop message reaches it across the inner/outer boundary. Every message checks
// expectedContext.isEntered(); execute() also invokes each argument and counts calls.
@SuppressWarnings("serial")
@ExportLibrary(InteropLibrary.class)
static class EvalContextTestException extends AbstractTruffleException {

    // Context that must be entered whenever a message is received.
    TruffleContext expectedContext;
    // Number of execute() messages observed.
    int executeCount = 0;

    @ExportMessage
    @TruffleBoundary
    final boolean isException() {
        assertTrue(expectedContext.isEntered());
        return true;
    }

    @ExportMessage
    @TruffleBoundary
    final RuntimeException throwException() {
        assertTrue(expectedContext.isEntered());
        throw this;
    }

    @ExportMessage
    @TruffleBoundary
    final boolean isExecutable() {
        assertTrue(expectedContext.isEntered());
        return true;
    }

    @ExportMessage
    @TruffleBoundary
    final Object execute(Object[] args) {
        assertTrue(expectedContext.isEntered());
        // Calling back into the arguments verifies they are usable from this side.
        for (Object object : args) {
            try {
                InteropLibrary.getUncached().execute(object);
            } catch (UnsupportedTypeException | ArityException | UnsupportedMessageException e) {
                throw CompilerDirectives.shouldNotReachHere(e);
            }
        }
        executeCount++;
        return this;
    }
}
// evalPublic passes primitives through unchanged and rejects internal-only languages.
@Test
public void testPublicEvalInnerContext() {
    // test that primitive values can just be passed through
    setupLanguageThatReturns(() -> 42);
    TruffleContext innerContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();
    Object result = innerContext.evalPublic(null, newTruffleSource());
    assertEquals(42, result);

    // an internal language must not be accessible through the public eval entry point
    com.oracle.truffle.api.source.Source internal = com.oracle.truffle.api.source.Source.newBuilder(LanguageSPIOrderTest.INTERNAL, "", "test").build();
    assertFails(() -> innerContext.evalPublic(null, internal), IllegalArgumentException.class);
    innerContext.close();
}
// evalInternal: primitives pass through unchanged, while objects are wrapped so that
// interop messages enter the correct (inner vs. outer) context on each hop.
@Test
public void testEvalInnerContext() throws InteropException {
    // test that primitive values can just be passed through
    setupLanguageThatReturns(() -> 42);
    TruffleContext innerContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();
    Object result = innerContext.evalInternal(null, newTruffleSource());
    assertEquals(42, result);

    // test that objects that cross the boundary are entered in the inner context
    EvalContextTestObject innerObject = new EvalContextTestObject();
    EvalContextTestObject outerObject = new EvalContextTestObject();
    innerContext.close();

    setupLanguageThatReturns(() -> innerObject);
    innerContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();
    innerObject.expectedContext = innerContext;
    outerObject.expectedContext = this.languageEnv.getContext();

    result = innerContext.evalInternal(null, newTruffleSource());
    assertNotEquals("must be wrapped", result, innerObject);

    // arguments of the parent context are entered in the outer context
    result = InteropLibrary.getUncached().execute(result, outerObject);

    // try a void method
    InteropLibrary.getUncached().toNative(result);

    // and return values are entered again in the inner context
    result = InteropLibrary.getUncached().execute(result, outerObject);

    assertEquals(2, innerObject.executeCount);
    assertEquals(2, outerObject.executeCount);

    innerContext.close();
}
// Executable interop object mirroring EvalContextTestException (minus the exception
// messages): every message asserts that expectedContext is entered, and execute()
// invokes all arguments and counts invocations.
@ExportLibrary(InteropLibrary.class)
static class EvalContextTestObject implements TruffleObject {

    // Context that must be entered whenever a message is received.
    TruffleContext expectedContext;
    // Number of execute() messages observed.
    int executeCount = 0;

    @ExportMessage
    @TruffleBoundary
    final boolean isExecutable() {
        assertTrue(expectedContext.isEntered());
        return true;
    }

    @ExportMessage
    @TruffleBoundary
    final Object execute(Object[] args) {
        assertTrue(expectedContext.isEntered());
        for (Object object : args) {
            try {
                InteropLibrary.getUncached().execute(object);
            } catch (UnsupportedTypeException | ArityException | UnsupportedMessageException e) {
                throw CompilerDirectives.shouldNotReachHere(e);
            }
        }
        executeCount++;
        return this;
    }
}
@Test
public void testNoInitializeMultiContextForInnerContext() {
AtomicBoolean multiContextInitialized = new AtomicBoolean(false);
setupEnv(Context.create(), new ProxyLanguage() {
@Override
protected CallTarget parse(ParsingRequest request) throws Exception {
return RootNode.createConstantNode(42).getCallTarget();
}
@Override
protected void initializeMultipleContexts() {
multiContextInitialized.set(true);
}
@Override
protected boolean areOptionsCompatible(OptionValues firstOptions, OptionValues newOptions) {
return true;
}
});
TruffleContext internalContext = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).initializeCreatorContext(false).build();
internalContext.evalInternal(null, com.oracle.truffle.api.source.Source.newBuilder(ProxyLanguage.ID, "", "").build());
assertFalse(multiContextInitialized.get());
internalContext.close();
}
@Test
public void testInitializeMultiContextForInnerContext() {
AtomicBoolean multiContextInitialized = new AtomicBoolean(false);
setupEnv(Context.newBuilder().engine(Engine.create()).build(), new ProxyLanguage() {
@Override
protected CallTarget parse(ParsingRequest request) throws Exception {
return RootNode.createConstantNode(42).getCallTarget();
}
@Override
protected void initializeMultipleContexts() {
multiContextInitialized.set(true);
}
@Override
protected boolean areOptionsCompatible(OptionValues firstOptions, OptionValues newOptions) {
return true;
}
});
TruffleContext ic = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();
assertTrue(multiContextInitialized.get());
ic.close();
}
private void setupLanguageThatReturns(Supplier<Object> supplier) {
setupEnv(Context.create(), new ProxyLanguage() {
@Override
protected CallTarget parse(ParsingRequest request) throws Exception {
return new RootNode(ProxyLanguage.get(null)) {
@Override
public Object execute(VirtualFrame frame) {
return get();
}
@TruffleBoundary
private Object get() {
return supplier.get();
}
}.getCallTarget();
}
});
}
    // Creates an empty Truffle source named "test" for the proxy language.
    private static com.oracle.truffle.api.source.Source newTruffleSource() {
        return com.oracle.truffle.api.source.Source.newBuilder(ProxyLanguage.ID, "", "test").build();
    }
    @Test
    public void testLeaveAndEnterInnerContext() {
        setupEnv();
        TruffleContext parent = languageEnv.getContext();
        TruffleContext tc = languageEnv.newInnerContextBuilder().initializeCreatorContext(true).build();
        assertFalse(tc.isEntered());
        assertEquals(parent, tc.getParent());
        // leaveAndEnter requires the context to be entered exactly once; calling it on a
        // context that is not entered must fail without running the supplied function.
        try {
            tc.leaveAndEnter(null, TruffleSafepoint.Interrupter.THREAD_INTERRUPT, (x) -> {
                fail();
                return true;
            }, null);
            fail();
        } catch (IllegalStateException e) {
            assertEquals("Context is entered 0 times. It must be entered exactly once for leaveAndEnter.", e.getMessage());
        }
        assertTrue(parent.isEntered());
        Object prev = tc.enter(null);
        try {
            // While the inner context is entered the parent is no longer the entered
            // context, but it stays active on this thread.
            assertFalse(parent.isEntered());
            assertTrue(parent.isActive());
            int value = tc.leaveAndEnter(null, TruffleSafepoint.Interrupter.THREAD_INTERRUPT, (x) -> {
                // Inside the callback neither context is entered; the parent remains active.
                assertFalse(tc.isEntered());
                assertFalse(parent.isEntered());
                assertTrue(parent.isActive());
                return 42;
            }, null);
            // The callback's return value is passed through and the context is re-entered.
            assertEquals(42, value);
            assertTrue(tc.isEntered());
        } finally {
            tc.leave(null, prev);
        }
        tc.close();
    }
private static Class<? extends Throwable> getCancelExecutionClass() {
try {
return Class.forName("com.oracle.truffle.polyglot.PolyglotEngineImpl$CancelExecution").asSubclass(Throwable.class);
} catch (ClassNotFoundException cnf) {
throw new AssertionError("Cannot load CancelExecution class.", cnf);
}
}
private static Node getCancelExecutionLocation(Throwable t) {
try {
Method m = t.getClass().getDeclaredMethod("getLocation");
m.setAccessible(true);
return (Node) m.invoke(t);
} catch (ReflectiveOperationException e) {
throw new AssertionError("Failed to invoke CancelExecution.getLocation.", e);
}
}
private static final class ControllerNode extends RootNode {
ControllerNode(TruffleLanguage<?> language) {
super(language);
}
@Override
public Object execute(VirtualFrame frame) {
return executeImpl();
}
@TruffleBoundary
private Object executeImpl() {
TruffleLanguage.Env env = LanguageContext.get(this).getEnv();
TruffleContext creatorContext = env.newInnerContextBuilder().initializeCreatorContext(true).build();
CountDownLatch running = new CountDownLatch(1);
Thread t = env.newTruffleThreadBuilder(() -> {
CallTarget target = LanguageContext.get(null).getEnv().parsePublic(com.oracle.truffle.api.source.Source.newBuilder(
ProxyLanguage.ID, "worker", "worker").build());
running.countDown();
target.call();
}).context(creatorContext).build();
try {
t.start();
running.await();
creatorContext.closeCancelled(this, "Stopping");
t.join();
return true;
} catch (InterruptedException ie) {
return false;
}
}
}
    /**
     * Root node that loops forever, polling the Truffle safepoint so that cancellation of its
     * context (driven by {@code ControllerNode}) can stop it.
     */
    private static final class WorkerNode extends RootNode {

        WorkerNode(TruffleLanguage<?> language) {
            super(language);
        }

        @Override
        public Object execute(VirtualFrame frame) {
            return executeImpl();
        }

        // Never returns normally; the loop is terminated by the cancellation thrown from the
        // safepoint poll when the context is closed via closeCancelled.
        @TruffleBoundary
        private Object executeImpl() {
            while (true) {
                try {
                    TruffleSafepoint.poll(this);
                    Thread.sleep(1_000);
                } catch (InterruptedException ie) {
                    // Deliberately ignore InterruptedException: the test expects this thread to
                    // be stopped only by the safepoint-driven cancellation, not by interrupts.
                }
            }
        }
    }
}
|
googleapis/google-cloud-java | 35,777 | java-gkehub/proto-google-cloud-gkehub-v1/src/main/java/com/google/cloud/gkehub/configmanagement/v1/PolicyControllerState.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/gkehub/v1/configmanagement/configmanagement.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.gkehub.configmanagement.v1;
/**
*
*
* <pre>
* State for PolicyControllerState.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.configmanagement.v1.PolicyControllerState}
*/
public final class PolicyControllerState extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.gkehub.configmanagement.v1.PolicyControllerState)
PolicyControllerStateOrBuilder {
private static final long serialVersionUID = 0L;
// Use PolicyControllerState.newBuilder() to construct.
private PolicyControllerState(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private PolicyControllerState() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new PolicyControllerState();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto
.internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto
.internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState.class,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState.Builder.class);
}
private int bitField0_;
public static final int VERSION_FIELD_NUMBER = 1;
private com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version_;
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*
* @return Whether the version field is set.
*/
@java.lang.Override
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*
* @return The version.
*/
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion getVersion() {
return version_ == null
? com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion.getDefaultInstance()
: version_;
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersionOrBuilder
getVersionOrBuilder() {
return version_ == null
? com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion.getDefaultInstance()
: version_;
}
public static final int DEPLOYMENT_STATE_FIELD_NUMBER = 2;
private com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deploymentState_;
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*
* @return Whether the deploymentState field is set.
*/
@java.lang.Override
public boolean hasDeploymentState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*
* @return The deploymentState.
*/
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState
getDeploymentState() {
return deploymentState_ == null
? com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState.getDefaultInstance()
: deploymentState_;
}
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*/
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentStateOrBuilder
getDeploymentStateOrBuilder() {
return deploymentState_ == null
? com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState.getDefaultInstance()
: deploymentState_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getVersion());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getDeploymentState());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getVersion());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDeploymentState());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState)) {
return super.equals(obj);
}
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState other =
(com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState) obj;
if (hasVersion() != other.hasVersion()) return false;
if (hasVersion()) {
if (!getVersion().equals(other.getVersion())) return false;
}
if (hasDeploymentState() != other.hasDeploymentState()) return false;
if (hasDeploymentState()) {
if (!getDeploymentState().equals(other.getDeploymentState())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasVersion()) {
hash = (37 * hash) + VERSION_FIELD_NUMBER;
hash = (53 * hash) + getVersion().hashCode();
}
if (hasDeploymentState()) {
hash = (37 * hash) + DEPLOYMENT_STATE_FIELD_NUMBER;
hash = (53 * hash) + getDeploymentState().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* State for PolicyControllerState.
* </pre>
*
* Protobuf type {@code google.cloud.gkehub.configmanagement.v1.PolicyControllerState}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.gkehub.configmanagement.v1.PolicyControllerState)
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerStateOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto
.internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto
.internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState.class,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState.Builder.class);
}
// Construct using
// com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getVersionFieldBuilder();
getDeploymentStateFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
version_ = null;
if (versionBuilder_ != null) {
versionBuilder_.dispose();
versionBuilder_ = null;
}
deploymentState_ = null;
if (deploymentStateBuilder_ != null) {
deploymentStateBuilder_.dispose();
deploymentStateBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.gkehub.configmanagement.v1.ConfigManagementProto
.internal_static_google_cloud_gkehub_configmanagement_v1_PolicyControllerState_descriptor;
}
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState
getDefaultInstanceForType() {
return com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState build() {
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState buildPartial() {
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState result =
new com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.version_ = versionBuilder_ == null ? version_ : versionBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.deploymentState_ =
deploymentStateBuilder_ == null ? deploymentState_ : deploymentStateBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState) {
return mergeFrom((com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState other) {
if (other
== com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState.getDefaultInstance())
return this;
if (other.hasVersion()) {
mergeVersion(other.getVersion());
}
if (other.hasDeploymentState()) {
mergeDeploymentState(other.getDeploymentState());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getVersionFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getDeploymentStateFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion.Builder,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersionOrBuilder>
versionBuilder_;
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*
* @return Whether the version field is set.
*/
public boolean hasVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*
* @return The version.
*/
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion getVersion() {
if (versionBuilder_ == null) {
return version_ == null
? com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion
.getDefaultInstance()
: version_;
} else {
return versionBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
public Builder setVersion(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion value) {
if (versionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
version_ = value;
} else {
versionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
public Builder setVersion(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion.Builder
builderForValue) {
if (versionBuilder_ == null) {
version_ = builderForValue.build();
} else {
versionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
public Builder mergeVersion(
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion value) {
if (versionBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& version_ != null
&& version_
!= com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion
.getDefaultInstance()) {
getVersionBuilder().mergeFrom(value);
} else {
version_ = value;
}
} else {
versionBuilder_.mergeFrom(value);
}
if (version_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
public Builder clearVersion() {
bitField0_ = (bitField0_ & ~0x00000001);
version_ = null;
if (versionBuilder_ != null) {
versionBuilder_.dispose();
versionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion.Builder
getVersionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getVersionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersionOrBuilder
getVersionOrBuilder() {
if (versionBuilder_ != null) {
return versionBuilder_.getMessageOrBuilder();
} else {
return version_ == null
? com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion
.getDefaultInstance()
: version_;
}
}
/**
*
*
* <pre>
* The version of Gatekeeper Policy Controller deployed.
* </pre>
*
* <code>.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion version = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion.Builder,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersionOrBuilder>
getVersionFieldBuilder() {
if (versionBuilder_ == null) {
versionBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersion.Builder,
com.google.cloud.gkehub.configmanagement.v1.PolicyControllerVersionOrBuilder>(
getVersion(), getParentForChildren(), isClean());
version_ = null;
}
return versionBuilder_;
}
private com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deploymentState_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState,
com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState.Builder,
com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentStateOrBuilder>
deploymentStateBuilder_;
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*
* @return Whether the deploymentState field is set.
*/
public boolean hasDeploymentState() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*
* @return The deploymentState.
*/
public com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState
getDeploymentState() {
if (deploymentStateBuilder_ == null) {
return deploymentState_ == null
? com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState
.getDefaultInstance()
: deploymentState_;
} else {
return deploymentStateBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*/
public Builder setDeploymentState(
com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState value) {
if (deploymentStateBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
deploymentState_ = value;
} else {
deploymentStateBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*/
public Builder setDeploymentState(
com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState.Builder
builderForValue) {
if (deploymentStateBuilder_ == null) {
deploymentState_ = builderForValue.build();
} else {
deploymentStateBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*/
public Builder mergeDeploymentState(
com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState value) {
if (deploymentStateBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& deploymentState_ != null
&& deploymentState_
!= com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState
.getDefaultInstance()) {
getDeploymentStateBuilder().mergeFrom(value);
} else {
deploymentState_ = value;
}
} else {
deploymentStateBuilder_.mergeFrom(value);
}
if (deploymentState_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The state about the policy controller installation.
* </pre>
*
* <code>
* .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
* </code>
*/
public Builder clearDeploymentState() {
bitField0_ = (bitField0_ & ~0x00000002);
deploymentState_ = null;
if (deploymentStateBuilder_ != null) {
deploymentStateBuilder_.dispose();
deploymentStateBuilder_ = null;
}
onChanged();
return this;
}
    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState.Builder
        getDeploymentStateBuilder() {
      // Handing out the mutable builder counts as setting the field.
      bitField0_ |= 0x00000002;
      onChanged();
      return getDeploymentStateFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    public com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentStateOrBuilder
        getDeploymentStateOrBuilder() {
      if (deploymentStateBuilder_ != null) {
        // The nested builder owns the field state; read through it.
        return deploymentStateBuilder_.getMessageOrBuilder();
      } else {
        // Fall back to the stored message, or the default instance if unset.
        return deploymentState_ == null
            ? com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState
                .getDefaultInstance()
            : deploymentState_;
      }
    }
    /**
     *
     *
     * <pre>
     * The state about the policy controller installation.
     * </pre>
     *
     * <code>
     * .google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState deployment_state = 2;
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState,
            com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState.Builder,
            com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentStateOrBuilder>
        getDeploymentStateFieldBuilder() {
      if (deploymentStateBuilder_ == null) {
        // Lazily create the field builder, seeding it with the current message.
        // From then on the builder owns the field state, so the plain
        // message reference is released.
        deploymentStateBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState,
                com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentState.Builder,
                com.google.cloud.gkehub.configmanagement.v1.GatekeeperDeploymentStateOrBuilder>(
                getDeploymentState(), getParentForChildren(), isClean());
        deploymentState_ = null;
      }
      return deploymentStateBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates unknown-field handling to the base generated builder.
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Delegates unknown-field merging to the base generated builder.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.gkehub.configmanagement.v1.PolicyControllerState)
}
// @@protoc_insertion_point(class_scope:google.cloud.gkehub.configmanagement.v1.PolicyControllerState)
  // Shared immutable default (empty) instance of this message.
  private static final com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState();
  }

  /** Returns the shared default (empty) instance of {@code PolicyControllerState}. */
  public static com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; builds via the message builder and always reports the
  // partially parsed message on failure so callers can inspect it.
  private static final com.google.protobuf.Parser<PolicyControllerState> PARSER =
      new com.google.protobuf.AbstractParser<PolicyControllerState>() {
        @java.lang.Override
        public PolicyControllerState parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far before rethrowing.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in a protocol buffer exception.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  /** Returns the parser for {@code PolicyControllerState} messages. */
  public static com.google.protobuf.Parser<PolicyControllerState> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<PolicyControllerState> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.gkehub.configmanagement.v1.PolicyControllerState
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/directory-studio | 35,886 | plugins/ldapbrowser.ui/src/main/java/org/apache/directory/studio/ldapbrowser/ui/editors/schemabrowser/AttributeTypeDescriptionDetailsPage.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.studio.ldapbrowser.ui.editors.schemabrowser;
import java.util.Collection;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.LdapSyntax;
import org.apache.directory.api.ldap.model.schema.MatchingRule;
import org.apache.directory.api.ldap.model.schema.ObjectClass;
import org.apache.directory.api.ldap.model.schema.UsageEnum;
import org.apache.directory.studio.common.ui.CommonUIConstants;
import org.apache.directory.studio.common.ui.CommonUIPlugin;
import org.apache.directory.studio.ldapbrowser.core.model.schema.SchemaUtils;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.events.ExpansionAdapter;
import org.eclipse.ui.forms.events.ExpansionEvent;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.Hyperlink;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
/**
* The AttributeTypeDescriptionDetailsPage displays the details of an
* attribute type description.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class AttributeTypeDescriptionDetailsPage extends SchemaDetailsPage
{
/** The main section, contains oid, names, desc and usage */
private Section mainSection;
/** The numeric oid field */
private Text numericOidText;
/** The names field */
private Text namesText;
/** The description field */
private Text descText;
/** The usage field */
private Text usageText;
/** The flag section, contains sv, obsolete, collective and read-only */
private Section flagSection;
/** The single-valued label */
private Label singleValuedLabel;
/** The obsolete label */
private Label isObsoleteLabel;
/** The collective label */
private Label collectiveLabel;
/** The no-user-modification label */
private Label noUserModificationLabel;
/** The syntax section, contains syntax description, lenth and a link to the syntax */
private Section syntaxSection;
/** The syntax description field */
private Text syntaxDescText;
/** The syntax length field */
private Text lengthText;
/** The link to the syntax */
private Hyperlink syntaxLink;
/** The matching rules section, contains links to matching rules */
private Section matchingRulesSection;
/** The link to the equality matching rule */
private Hyperlink equalityLink;
/** The link to the substring matching rule */
private Hyperlink substringLink;
/** The link to the ordering matching rule */
private Hyperlink orderingLink;
/** The section with other matching rules */
private Section otherMatchSection;
/** The section with links to object classes using the selected attribute as must */
private Section usedAsMustSection;
/** The section with links to object classes using the selected attribute as may */
private Section usedAsMaySection;
/** The section with a link to the superior attribute type */
private Section supertypeSection;
/** The section with links to the derived attribute types */
private Section subtypesSection;
    /**
     * Creates a new instance of AttributeTypeDescriptionDetailsPage.
     *
     * Delegates to the {@link SchemaDetailsPage} constructor; the actual
     * widgets are built later in createContents().
     *
     * @param schemaPage the master schema page
     * @param toolkit the toolkit used to create controls
     */
    public AttributeTypeDescriptionDetailsPage( SchemaPage schemaPage, FormToolkit toolkit )
    {
        super( schemaPage, toolkit );
    }
/**
* {@inheritDoc}
*/
protected void createContents( final ScrolledForm detailForm )
{
this.detailForm = detailForm;
detailForm.getBody().setLayout( new GridLayout() );
// create main section
mainSection = toolkit.createSection( detailForm.getBody(), SWT.NONE );
mainSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.Details" ) ); //$NON-NLS-1$
mainSection.marginWidth = 0;
mainSection.marginHeight = 0;
mainSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( mainSection );
// create flag section
flagSection = toolkit.createSection( detailForm.getBody(), SWT.NONE );
flagSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.Flags" ) ); //$NON-NLS-1$
flagSection.marginWidth = 0;
flagSection.marginHeight = 0;
flagSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( flagSection );
// create flags content
Composite flagClient = toolkit.createComposite( flagSection, SWT.WRAP );
GridLayout flagLayout = new GridLayout();
flagLayout.numColumns = 4;
flagLayout.marginWidth = 0;
flagLayout.marginHeight = 0;
flagClient.setLayout( flagLayout );
flagSection.setClient( flagClient );
singleValuedLabel = toolkit.createLabel( flagClient, Messages
.getString( "AttributeTypeDescriptionDetailsPage.SingleValued" ), SWT.CHECK ); //$NON-NLS-1$
singleValuedLabel.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
noUserModificationLabel = toolkit.createLabel( flagClient, Messages
.getString( "AttributeTypeDescriptionDetailsPage.ReadOnly" ), SWT.CHECK ); //$NON-NLS-1$
noUserModificationLabel.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
collectiveLabel = toolkit.createLabel( flagClient, Messages
.getString( "AttributeTypeDescriptionDetailsPage.Collective" ), SWT.CHECK ); //$NON-NLS-1$
collectiveLabel.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
isObsoleteLabel = toolkit.createLabel( flagClient, Messages
.getString( "AttributeTypeDescriptionDetailsPage.Obsolete" ), SWT.CHECK ); //$NON-NLS-1$
isObsoleteLabel.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
// create syntax section
syntaxSection = toolkit.createSection( detailForm.getBody(), SWT.NONE );
syntaxSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.Syntax" ) ); //$NON-NLS-1$
syntaxSection.marginWidth = 0;
syntaxSection.marginHeight = 0;
syntaxSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( syntaxSection );
// create syntax content
Composite syntaxClient = toolkit.createComposite( syntaxSection, SWT.WRAP );
GridLayout syntaxLayout = new GridLayout();
syntaxLayout.numColumns = 2;
syntaxLayout.marginWidth = 0;
syntaxLayout.marginHeight = 0;
syntaxClient.setLayout( syntaxLayout );
syntaxSection.setClient( syntaxClient );
toolkit.createLabel( syntaxClient,
Messages.getString( "AttributeTypeDescriptionDetailsPage.SyntaxOID" ), SWT.NONE ); //$NON-NLS-1$
syntaxLink = toolkit.createHyperlink( syntaxClient, "", SWT.WRAP ); //$NON-NLS-1$
syntaxLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
syntaxLink.addHyperlinkListener( this );
toolkit.createLabel( syntaxClient,
Messages.getString( "AttributeTypeDescriptionDetailsPage.SyntaxDescription" ), SWT.NONE ); //$NON-NLS-1$
syntaxDescText = toolkit.createText( syntaxClient, "", SWT.NONE ); //$NON-NLS-1$
syntaxDescText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
syntaxDescText.setEditable( false );
toolkit
.createLabel( syntaxClient, Messages.getString( "AttributeTypeDescriptionDetailsPage.Length" ), SWT.NONE ); //$NON-NLS-1$
lengthText = toolkit.createText( syntaxClient, "", SWT.NONE ); //$NON-NLS-1$
lengthText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
lengthText.setEditable( false );
// create matching rules section
matchingRulesSection = toolkit.createSection( detailForm.getBody(), SWT.NONE );
matchingRulesSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.MatchingRules" ) ); //$NON-NLS-1$
matchingRulesSection.marginWidth = 0;
matchingRulesSection.marginHeight = 0;
matchingRulesSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( matchingRulesSection );
// create matching rules content
Composite matchClient = toolkit.createComposite( matchingRulesSection, SWT.WRAP );
GridLayout matchLayout = new GridLayout();
matchLayout.numColumns = 2;
matchLayout.marginWidth = 0;
matchLayout.marginHeight = 0;
matchClient.setLayout( matchLayout );
matchingRulesSection.setClient( matchClient );
toolkit.createLabel( matchClient,
Messages.getString( "AttributeTypeDescriptionDetailsPage.EqualityMatch" ), SWT.NONE ); //$NON-NLS-1$
equalityLink = toolkit.createHyperlink( matchClient, "", SWT.WRAP ); //$NON-NLS-1$
equalityLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
equalityLink.addHyperlinkListener( this );
toolkit.createLabel( matchClient,
Messages.getString( "AttributeTypeDescriptionDetailsPage.SubstringMatch" ), SWT.NONE ); //$NON-NLS-1$
substringLink = toolkit.createHyperlink( matchClient, "", SWT.WRAP ); //$NON-NLS-1$
substringLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
substringLink.addHyperlinkListener( this );
toolkit.createLabel( matchClient,
Messages.getString( "AttributeTypeDescriptionDetailsPage.OrderingMatch" ), SWT.NONE ); //$NON-NLS-1$
orderingLink = toolkit.createHyperlink( matchClient, "", SWT.WRAP ); //$NON-NLS-1$
orderingLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
orderingLink.addHyperlinkListener( this );
// create other matching rules section
otherMatchSection = toolkit.createSection( detailForm.getBody(), Section.TWISTIE );
otherMatchSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.OtherMatchingRules" ) ); //$NON-NLS-1$
otherMatchSection.marginWidth = 0;
otherMatchSection.marginHeight = 0;
otherMatchSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( otherMatchSection );
otherMatchSection.addExpansionListener( new ExpansionAdapter()
{
public void expansionStateChanged( ExpansionEvent e )
{
detailForm.reflow( true );
}
} );
// create used as must section
usedAsMustSection = toolkit.createSection( detailForm.getBody(), Section.TWISTIE );
usedAsMustSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.UsedAsMust" ) ); //$NON-NLS-1$
usedAsMustSection.marginWidth = 0;
usedAsMustSection.marginHeight = 0;
usedAsMustSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( usedAsMustSection );
usedAsMustSection.addExpansionListener( new ExpansionAdapter()
{
public void expansionStateChanged( ExpansionEvent e )
{
detailForm.reflow( true );
}
} );
// create used as may section
usedAsMaySection = toolkit.createSection( detailForm.getBody(), Section.TWISTIE );
usedAsMaySection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.UsedAsMay" ) ); //$NON-NLS-1$
usedAsMaySection.marginWidth = 0;
usedAsMaySection.marginHeight = 0;
usedAsMaySection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( usedAsMaySection );
usedAsMaySection.addExpansionListener( new ExpansionAdapter()
{
public void expansionStateChanged( ExpansionEvent e )
{
detailForm.reflow( true );
}
} );
// create supertype section
supertypeSection = toolkit.createSection( detailForm.getBody(), Section.TWISTIE );
supertypeSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.Supertype" ) ); //$NON-NLS-1$
supertypeSection.marginWidth = 0;
supertypeSection.marginHeight = 0;
supertypeSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( supertypeSection );
supertypeSection.addExpansionListener( new ExpansionAdapter()
{
public void expansionStateChanged( ExpansionEvent e )
{
detailForm.reflow( true );
}
} );
// create subtypes section
subtypesSection = toolkit.createSection( detailForm.getBody(), Section.TWISTIE );
subtypesSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.Subtypes" ) ); //$NON-NLS-1$
subtypesSection.marginWidth = 0;
subtypesSection.marginHeight = 0;
subtypesSection.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
toolkit.createCompositeSeparator( subtypesSection );
subtypesSection.addExpansionListener( new ExpansionAdapter()
{
public void expansionStateChanged( ExpansionEvent e )
{
detailForm.reflow( true );
}
} );
// create raw section
createRawSection();
}
/**
* {@inheritDoc}
*/
public void setInput( Object input )
{
AttributeType atd = null;
if ( input instanceof AttributeType )
{
atd = ( AttributeType ) input;
}
// create main content
createMainContent( atd );
// set flags
if ( ( atd != null ) && ( atd.isSingleValued() ) )
{
singleValuedLabel.setForeground( getColor( CommonUIConstants.DEFAULT_COLOR ) );
}
else
{
singleValuedLabel.setForeground( getColor( CommonUIConstants.DISABLED_COLOR ) );
}
if ( atd != null && atd.isObsolete() )
{
isObsoleteLabel.setForeground( getColor( CommonUIConstants.DEFAULT_COLOR ) );
}
else
{
isObsoleteLabel.setForeground( getColor( CommonUIConstants.DISABLED_COLOR ) );
}
if ( atd != null && atd.isCollective() )
{
collectiveLabel.setForeground( getColor( CommonUIConstants.DEFAULT_COLOR ) );
}
else
{
collectiveLabel.setForeground( getColor( CommonUIConstants.DISABLED_COLOR ) );
}
if ( atd != null && !atd.isUserModifiable() )
{
noUserModificationLabel.setForeground( getColor( CommonUIConstants.DEFAULT_COLOR ) );
}
else
{
noUserModificationLabel.setForeground( getColor( CommonUIConstants.DISABLED_COLOR ) );
}
flagSection.layout();
// set syntax content
String lsdOid = null;
LdapSyntax lsd = null;
long lsdLength = 0;
if ( atd != null )
{
lsdOid = SchemaUtils.getSyntaxNumericOidTransitive( atd, getSchema() );
if ( lsdOid != null && getSchema().hasLdapSyntaxDescription( lsdOid ) )
{
lsd = getSchema().getLdapSyntaxDescription( lsdOid );
}
lsdLength = SchemaUtils.getSyntaxLengthTransitive( atd, getSchema() );
}
syntaxLink.setText( getNonNullString( lsd != null ? lsd.getOid() : lsdOid ) );
syntaxLink.setHref( lsd );
syntaxLink.setUnderlined( lsd != null );
syntaxLink.setEnabled( lsd != null );
syntaxDescText.setText( getNonNullString( lsd != null ? lsd.getDescription() : null ) );
lengthText.setText( getNonNullString( lsdLength > 0 ? Long.toString( lsdLength ) : null ) );
syntaxSection.layout();
// set matching rules content
String emrOid = null;
MatchingRule emr = null;
if ( atd != null )
{
emrOid = SchemaUtils.getEqualityMatchingRuleNameOrNumericOidTransitive( atd, getSchema() );
if ( emrOid != null && getSchema().hasMatchingRuleDescription( emrOid ) )
{
emr = getSchema().getMatchingRuleDescription( emrOid );
}
}
equalityLink.setText( getNonNullString( emr != null ? SchemaUtils.toString( emr ) : emrOid ) );
equalityLink.setHref( emr );
equalityLink.setUnderlined( emr != null );
equalityLink.setEnabled( emr != null );
String smrOid = null;
MatchingRule smr = null;
if ( atd != null )
{
smrOid = SchemaUtils.getSubstringMatchingRuleNameOrNumericOidTransitive( atd, getSchema() );
if ( smrOid != null && getSchema().hasMatchingRuleDescription( smrOid ) )
{
smr = getSchema().getMatchingRuleDescription( smrOid );
}
}
substringLink.setText( getNonNullString( smr != null ? SchemaUtils.toString( smr ) : smrOid ) );
substringLink.setHref( smr );
substringLink.setUnderlined( smr != null );
substringLink.setEnabled( smr != null );
String omrOid = null;
MatchingRule omr = null;
if ( atd != null )
{
omrOid = SchemaUtils.getOrderingMatchingRuleNameOrNumericOidTransitive( atd, getSchema() );
if ( omrOid != null && getSchema().hasMatchingRuleDescription( omrOid ) )
{
omr = getSchema().getMatchingRuleDescription( omrOid );
}
}
orderingLink.setText( getNonNullString( omr != null ? SchemaUtils.toString( omr ) : omrOid ) );
orderingLink.setHref( omr );
orderingLink.setUnderlined( omr != null );
orderingLink.setEnabled( omr != null );
matchingRulesSection.layout();
// create contents of dynamic sections
createOtherMatchContent( atd );
createUsedAsMustContent( atd );
createUsedAsMayContent( atd );
createSupertypeContent( atd );
createSubtypesContent( atd );
createRawContents( atd );
detailForm.reflow( true );
}
    /**
     * Looks up the shared {@link Color} registered for the given color id in
     * the common UI plugin.
     *
     * @param color the color id, see {@link CommonUIConstants}
     * @return the shared color instance
     */
    private Color getColor( String color )
    {
        return CommonUIPlugin.getDefault().getColor( color );
    }
    /**
     * Creates the content of the main section. It is newly created
     * on every input change to ensure a proper layout of
     * multilined descriptions.
     *
     * @param atd the attribute type description, may be null
     */
    private void createMainContent( AttributeType atd )
    {
        // dispose old content
        if ( mainSection.getClient() != null )
        {
            mainSection.getClient().dispose();
        }

        // create new client
        Composite mainClient = toolkit.createComposite( mainSection, SWT.WRAP );
        GridLayout mainLayout = new GridLayout( 2, false );
        mainClient.setLayout( mainLayout );
        mainSection.setClient( mainClient );

        // create new content: OID, names, description and usage as
        // read-only text fields; the section stays empty when atd is null
        if ( atd != null )
        {
            toolkit.createLabel( mainClient,
                Messages.getString( "AttributeTypeDescriptionDetailsPage.NumericOID" ), SWT.NONE ); //$NON-NLS-1$
            numericOidText = toolkit.createText( mainClient, getNonNullString( atd.getOid() ), SWT.NONE );
            numericOidText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
            numericOidText.setEditable( false );

            toolkit.createLabel( mainClient,
                Messages.getString( "AttributeTypeDescriptionDetailsPage.AttributeNames" ), SWT.NONE ); //$NON-NLS-1$
            namesText = toolkit.createText( mainClient, getNonNullString( SchemaUtils.toString( atd ) ), SWT.NONE );
            namesText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
            namesText.setEditable( false );

            toolkit.createLabel( mainClient,
                Messages.getString( "AttributeTypeDescriptionDetailsPage.Description" ), SWT.WRAP ); //$NON-NLS-1$
            descText = toolkit.createText( mainClient, getNonNullString( atd.getDescription() ), SWT.WRAP | SWT.MULTI );
            GridData gd = new GridData( GridData.FILL_HORIZONTAL );
            // NOTE(review): the magic 100/60 offsets presumably compensate for
            // the label column and section trim - confirm before changing
            gd.widthHint = detailForm.getForm().getSize().x - 100 - 60;
            descText.setLayoutData( gd );
            descText.setEditable( false );

            toolkit.createLabel( mainClient,
                Messages.getString( "AttributeTypeDescriptionDetailsPage.Usage" ), SWT.NONE ); //$NON-NLS-1$
            usageText = toolkit.createText( mainClient, getNonNullString( UsageEnum.render( atd.getUsage() ) ),
                SWT.NONE );
            usageText.setLayoutData( new GridData( GridData.GRAB_HORIZONTAL ) );
            usageText.setEditable( false );
        }

        mainSection.layout();
    }
    /**
     * Creates the content of the other matching rules section.
     * It is newly created on every input change because the content
     * of this section is dynamic.
     *
     * @param atd the attribute type description, may be null
     */
    private void createOtherMatchContent( AttributeType atd )
    {
        // dispose old content
        if ( otherMatchSection.getClient() != null )
        {
            otherMatchSection.getClient().dispose();
        }

        // create new client
        Composite otherMatchClient = toolkit.createComposite( otherMatchSection, SWT.WRAP );
        otherMatchClient.setLayout( new GridLayout() );
        otherMatchSection.setClient( otherMatchClient );

        // create new content, either links to other matching rules
        // or a dash if no other matching rules exist.
        if ( atd != null )
        {
            Collection<String> otherMrdNames = SchemaUtils.getOtherMatchingRuleDescriptionNames( atd, getSchema() );
            if ( otherMrdNames != null && otherMrdNames.size() > 0 )
            {
                // section title includes the number of other matching rules
                otherMatchSection
                    .setText( NLS
                        .bind(
                            Messages.getString( "AttributeTypeDescriptionDetailsPage.OtherMatchingRulesCount" ), new Object[] { otherMrdNames.size() } ) ); //$NON-NLS-1$
                for ( String mrdName : otherMrdNames )
                {
                    if ( getSchema().hasMatchingRuleDescription( mrdName ) )
                    {
                        // matching rule known to the schema: enabled link
                        MatchingRule mrd = getSchema().getMatchingRuleDescription( mrdName );
                        Hyperlink otherMatchLink = toolkit.createHyperlink( otherMatchClient, SchemaUtils
                            .toString( mrd ), SWT.WRAP );
                        otherMatchLink.setHref( mrd );
                        otherMatchLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                        otherMatchLink.setUnderlined( true );
                        otherMatchLink.setEnabled( true );
                        otherMatchLink.addHyperlinkListener( this );
                    }
                    else
                    {
                        // unknown matching rule: disabled link showing the raw name
                        // NOTE(review): unlike the branch above no layout data is
                        // set here - presumably an oversight, confirm
                        Hyperlink otherMatchLink = toolkit.createHyperlink( otherMatchClient, mrdName, SWT.WRAP );
                        otherMatchLink.setHref( null );
                        otherMatchLink.setUnderlined( false );
                        otherMatchLink.setEnabled( false );
                    }
                }
            }
            else
            {
                otherMatchSection.setText( NLS.bind( Messages
                    .getString( "AttributeTypeDescriptionDetailsPage.OtherMatchingRulesCount" ), new Object[] { 0 } ) ); //$NON-NLS-1$
                Text otherText = toolkit.createText( otherMatchClient, getNonNullString( null ), SWT.NONE );
                otherText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                otherText.setEditable( false );
            }
        }
        else
        {
            // no input: reset the section title to its plain form
            otherMatchSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.OtherMatchingRules" ) ); //$NON-NLS-1$
        }

        otherMatchSection.layout();
    }
    /**
     * Creates the content of the supertype section.
     * It is newly created on every input change because the content
     * of this section is dynamic.
     *
     * @param atd the attribute type description, may be null
     */
    private void createSupertypeContent( AttributeType atd )
    {
        // dispose old content
        if ( supertypeSection.getClient() != null )
        {
            supertypeSection.getClient().dispose();
        }

        // create new client
        Composite superClient = toolkit.createComposite( supertypeSection, SWT.WRAP );
        superClient.setLayout( new GridLayout() );
        supertypeSection.setClient( superClient );

        // create new content, either a link to the superior attribute type
        // or a dash if no supertype exists.
        if ( atd != null )
        {
            String superType = atd.getSuperiorOid();
            if ( superType != null )
            {
                supertypeSection.setText( NLS.bind( Messages
                    .getString( "AttributeTypeDescriptionDetailsPage.SupertypeCount" ), new Object[] { 1 } ) ); //$NON-NLS-1$
                if ( getSchema().hasAttributeTypeDescription( superType ) )
                {
                    // supertype known to the schema: enabled link
                    AttributeType supAtd = getSchema().getAttributeTypeDescription( superType );
                    Hyperlink superLink = toolkit.createHyperlink( superClient, SchemaUtils.toString( supAtd ),
                        SWT.WRAP );
                    superLink.setHref( supAtd );
                    superLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                    superLink.setUnderlined( true );
                    superLink.setEnabled( true );
                    superLink.addHyperlinkListener( this );
                }
                else
                {
                    // unknown supertype: disabled link showing the raw name/OID
                    // NOTE(review): no layout data is set in this branch,
                    // unlike the branch above - presumably an oversight, confirm
                    Hyperlink superLink = toolkit.createHyperlink( superClient, superType, SWT.WRAP );
                    superLink.setHref( null );
                    superLink.setUnderlined( false );
                    superLink.setEnabled( false );
                }
            }
            else
            {
                supertypeSection.setText( NLS.bind( Messages
                    .getString( "AttributeTypeDescriptionDetailsPage.SupertypeCount" ), new Object[] { 0 } ) ); //$NON-NLS-1$
                Text supText = toolkit.createText( superClient, getNonNullString( null ), SWT.NONE );
                supText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                supText.setEditable( false );
            }
        }
        else
        {
            // no input: reset the section title to its plain form
            supertypeSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.Supertype" ) ); //$NON-NLS-1$
        }

        supertypeSection.layout();
    }
    /**
     * Creates the content of the subtypes.
     * It is newly created on every input change because the content
     * of this section is dynamic.
     *
     * @param atd the attribute type description, may be null
     */
    private void createSubtypesContent( AttributeType atd )
    {
        // dispose old content
        if ( subtypesSection.getClient() != null )
        {
            subtypesSection.getClient().dispose();
        }

        // create new client
        Composite subClient = toolkit.createComposite( subtypesSection, SWT.WRAP );
        subClient.setLayout( new GridLayout() );
        subtypesSection.setClient( subClient );

        // create new content, either links to subtypes or a dash if no subtypes exist.
        if ( atd != null )
        {
            Collection<AttributeType> derivedAtds = SchemaUtils.getDerivedAttributeTypeDescriptions( atd,
                getSchema() );
            if ( derivedAtds != null && derivedAtds.size() > 0 )
            {
                // section title includes the number of derived attribute types
                subtypesSection
                    .setText( NLS
                        .bind(
                            Messages.getString( "AttributeTypeDescriptionDetailsPage.SubtypesCount" ), new Object[] { derivedAtds.size() } ) ); //$NON-NLS-1$
                for ( AttributeType derivedAtd : derivedAtds )
                {
                    // one enabled link per derived attribute type
                    Hyperlink subAttributeTypeLink = toolkit.createHyperlink( subClient, SchemaUtils
                        .toString( derivedAtd ), SWT.WRAP );
                    subAttributeTypeLink.setHref( derivedAtd );
                    subAttributeTypeLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                    subAttributeTypeLink.setUnderlined( true );
                    subAttributeTypeLink.setEnabled( true );
                    subAttributeTypeLink.addHyperlinkListener( this );
                }
            }
            else
            {
                subtypesSection.setText( NLS.bind( Messages
                    .getString( "AttributeTypeDescriptionDetailsPage.SubtypesCount" ), new Object[] { 0 } ) ); //$NON-NLS-1$
                Text subText = toolkit.createText( subClient, getNonNullString( null ), SWT.NONE );
                subText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                subText.setEditable( false );
            }
        }
        else
        {
            // no input: reset the section title to its plain form
            subtypesSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.Subtypes" ) ); //$NON-NLS-1$
        }

        subtypesSection.layout();
    }
    /**
     * Creates the content of the used as must section.
     * It is newly created on every input change because the content
     * of this section is dynamic.
     *
     * @param atd the attribute type description, may be null
     */
    private void createUsedAsMustContent( AttributeType atd )
    {
        // dispose old content
        if ( usedAsMustSection.getClient() != null )
        {
            usedAsMustSection.getClient().dispose();
        }

        // create new client
        Composite mustClient = toolkit.createComposite( usedAsMustSection, SWT.WRAP );
        mustClient.setLayout( new GridLayout() );
        usedAsMustSection.setClient( mustClient );

        // create new content, either links to objectclasses or a dash
        if ( atd != null )
        {
            Collection<ObjectClass> usedAsMusts = SchemaUtils.getUsedAsMust( atd, getSchema() );
            if ( usedAsMusts != null && usedAsMusts.size() > 0 )
            {
                // section title includes the number of referencing object classes
                usedAsMustSection
                    .setText( NLS
                        .bind(
                            Messages.getString( "AttributeTypeDescriptionDetailsPage.UsedAsMustCount" ), new Object[] { usedAsMusts.size() } ) ); //$NON-NLS-1$
                for ( ObjectClass ocd : usedAsMusts )
                {
                    // one enabled link per object class using this attribute as MUST
                    Hyperlink usedAsMustLink = toolkit.createHyperlink( mustClient, SchemaUtils.toString( ocd ),
                        SWT.WRAP );
                    usedAsMustLink.setHref( ocd );
                    usedAsMustLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                    usedAsMustLink.setUnderlined( true );
                    usedAsMustLink.setEnabled( true );
                    usedAsMustLink.addHyperlinkListener( this );
                }
            }
            else
            {
                usedAsMustSection.setText( NLS.bind( Messages
                    .getString( "AttributeTypeDescriptionDetailsPage.UsedAsMustCount" ), new Object[] { 0 } ) ); //$NON-NLS-1$
                Text mustText = toolkit.createText( mustClient, getNonNullString( null ), SWT.NONE );
                mustText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                mustText.setEditable( false );
            }
        }
        else
        {
            // no input: reset the section title to its plain form
            usedAsMustSection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.UsedAsMust" ) ); //$NON-NLS-1$
        }

        usedAsMustSection.layout();
    }
    /**
     * Creates the content of the used as may section.
     * It is newly created on every input change because the content
     * of this section is dynamic.
     *
     * @param atd the attribute type description, may be null
     */
    private void createUsedAsMayContent( AttributeType atd )
    {
        // dispose old content
        if ( usedAsMaySection.getClient() != null )
        {
            usedAsMaySection.getClient().dispose();
        }

        // create new client
        Composite mayClient = toolkit.createComposite( usedAsMaySection, SWT.WRAP );
        mayClient.setLayout( new GridLayout() );
        usedAsMaySection.setClient( mayClient );

        // create new content, either links to objectclasses or a dash
        if ( atd != null )
        {
            Collection<ObjectClass> usedAsMays = SchemaUtils.getUsedAsMay( atd, getSchema() );
            if ( usedAsMays != null && usedAsMays.size() > 0 )
            {
                // section title includes the number of referencing object classes
                usedAsMaySection
                    .setText( NLS
                        .bind(
                            Messages.getString( "AttributeTypeDescriptionDetailsPage.UsedAsMayCount" ), new Object[] { usedAsMays.size() } ) ); //$NON-NLS-1$
                for ( ObjectClass ocd : usedAsMays )
                {
                    // one enabled link per object class using this attribute as MAY
                    Hyperlink usedAsMayLink = toolkit
                        .createHyperlink( mayClient, SchemaUtils.toString( ocd ), SWT.WRAP );
                    usedAsMayLink.setHref( ocd );
                    usedAsMayLink.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                    usedAsMayLink.setUnderlined( true );
                    usedAsMayLink.setEnabled( true );
                    usedAsMayLink.addHyperlinkListener( this );
                }
            }
            else
            {
                usedAsMaySection.setText( NLS.bind( Messages
                    .getString( "AttributeTypeDescriptionDetailsPage.UsedAsMayCount" ), new Object[] { 0 } ) ); //$NON-NLS-1$
                Text mayText = toolkit.createText( mayClient, getNonNullString( null ), SWT.NONE );
                mayText.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
                mayText.setEditable( false );
            }
        }
        else
        {
            // no input: reset the section title to its plain form
            usedAsMaySection.setText( Messages.getString( "AttributeTypeDescriptionDetailsPage.UsedAsMay" ) ); //$NON-NLS-1$
        }

        usedAsMaySection.layout();
    }
}
|
apache/hadoop | 35,605 | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesDelegationTokens.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.createRM;
import static org.apache.hadoop.yarn.webapp.WebServicesTestUtils.assertResponseStatusCode;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.security.Principal;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Callable;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.core.Application;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.client.Entity;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.minikdc.MiniKdc;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authentication.KerberosTestUtils;
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
import org.apache.hadoop.security.authentication.server.PseudoAuthenticationHandler;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.util.XMLUtils;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.DelegationToken;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.JerseyTestBase;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.MethodSource;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.toJson;
import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.toEntity;
import org.glassfish.jersey.internal.inject.AbstractBinder;
import org.glassfish.jersey.jettison.JettisonFeature;
import org.glassfish.jersey.logging.LoggingFeature;
import org.glassfish.jersey.server.ResourceConfig;
/**
 * Tests for the ResourceManager delegation-token REST endpoints
 * ({@code /ws/v1/cluster/delegation-token}): creation, renewal and
 * cancellation. Each parameterized run (see {@link #guiceConfigs()})
 * configures the web app with either the Kerberos or the simple
 * authentication filter; token operations are expected to succeed only
 * under Kerberos and to be rejected with FORBIDDEN under simple auth.
 */
public class TestRMWebServicesDelegationTokens extends JerseyTestBase {

  private static File testRootDir;
  private static File httpSpnegoKeytabFile = new File(
      KerberosTestUtils.getKeytabFile());
  private static String httpSpnegoPrincipal = KerberosTestUtils
      .getServerPrincipal();
  private static MiniKdc testMiniKDC;
  private static MockRM rm;
  // true when the current parameterized run uses the Kerberos auth filter
  private boolean isKerberosAuth = false;
  private ResourceConfig config;
  private HttpServletRequest request = mock(HttpServletRequest.class);

  @Override
  protected Application configure() {
    config = new ResourceConfig();
    config.register(RMWebServices.class);
    config.register(GenericExceptionHandler.class);
    config.register(TestRMWebServicesAppsModification.TestRMCustomAuthFilter.class);
    config.register(new JettisonFeature()).register(JAXBContextResolver.class);
    return config;
  }

  /**
   * Binds a freshly created RM plus mocked servlet request/response into
   * Jersey, and installs the auth filter selected by {@link #isKerberosAuth}.
   */
  private class JerseyBinder extends AbstractBinder {
    private Configuration conf = new YarnConfiguration();

    @Override
    protected void configure() {
      conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS,
          YarnConfiguration.DEFAULT_RM_AM_MAX_ATTEMPTS);
      conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
          ResourceScheduler.class);
      conf.setBoolean(YarnConfiguration.YARN_ACL_ENABLE, true);
      rm = createRM(conf);
      configureScheduler();
      when(request.getScheme()).thenReturn("http");
      final HttpServletResponse response = mock(HttpServletResponse.class);
      bind(rm).to(ResourceManager.class).named("rm");
      bind(conf).to(Configuration.class).named("conf");
      bind(request).to(HttpServletRequest.class);
      bind(response).to(HttpServletResponse.class);
      Principal principal1 = () -> "testuser";
      when(request.getUserPrincipal()).thenReturn(principal1);
      if (isKerberosAuth) {
        bind(TestKerberosAuthFilter.class);
      } else {
        bind(TestSimpleAuthFilter.class);
      }
    }

    // hook for subclasses to adjust the configuration / auth mode
    public void configureScheduler() {
    }

    public Configuration getConf() {
      return conf;
    }
  }

  // Make sure the test uses the published header string
  final String yarnTokenHeader = "Hadoop-YARN-RM-Delegation-Token";

  /** SPNEGO/Kerberos authentication filter backed by the MiniKdc keytab. */
  public static class TestKerberosAuthFilter extends AuthenticationFilter {
    @Override
    protected Properties getConfiguration(String configPrefix,
        FilterConfig filterConfig) throws ServletException {
      Properties properties =
          super.getConfiguration(configPrefix, filterConfig);
      properties.put(KerberosAuthenticationHandler.PRINCIPAL,
          httpSpnegoPrincipal);
      properties.put(KerberosAuthenticationHandler.KEYTAB,
          httpSpnegoKeytabFile.getAbsolutePath());
      properties.put(AuthenticationFilter.AUTH_TYPE, "kerberos");
      return properties;
    }
  }

  /** Simple (pseudo) authentication filter; anonymous access is disabled. */
  public static class TestSimpleAuthFilter extends AuthenticationFilter {
    @Override
    protected Properties getConfiguration(String configPrefix,
        FilterConfig filterConfig) throws ServletException {
      Properties properties =
          super.getConfiguration(configPrefix, filterConfig);
      properties.put(KerberosAuthenticationHandler.PRINCIPAL,
          httpSpnegoPrincipal);
      properties.put(KerberosAuthenticationHandler.KEYTAB,
          httpSpnegoKeytabFile.getAbsolutePath());
      properties.put(AuthenticationFilter.AUTH_TYPE, "simple");
      properties.put(PseudoAuthenticationHandler.ANONYMOUS_ALLOWED, "false");
      return properties;
    }
  }

  private class SimpleAuth extends JerseyBinder {
    @Override
    public void configureScheduler() {
      isKerberosAuth = false;
      getConf().set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
    }
  }

  private class KerberosAuth extends JerseyBinder {
    @Override
    public void configureScheduler() {
      isKerberosAuth = true;
      getConf().set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
      getConf().set(YarnConfiguration.RM_WEBAPP_SPNEGO_USER_NAME_KEY, httpSpnegoPrincipal);
      getConf().set(YarnConfiguration.RM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
          httpSpnegoKeytabFile.getAbsolutePath());
      getConf().set(YarnConfiguration.NM_WEBAPP_SPNEGO_USER_NAME_KEY,
          httpSpnegoPrincipal);
      getConf().set(YarnConfiguration.NM_WEBAPP_SPNEGO_KEYTAB_FILE_KEY,
          httpSpnegoKeytabFile.getAbsolutePath());
    }
  }

  /** Parameter source: run 0 selects Kerberos auth, run 1 simple auth. */
  public static Collection<Object[]> guiceConfigs() {
    return Arrays.asList(new Object[][]{{0}, {1}});
  }

  /** Registers the binder for the requested run and starts the Jersey test container. */
  public void initTestRMWebServicesDelegationTokens(int run) throws Exception {
    switch (run) {
    case 0:
    default:
      config.register(new KerberosAuth());
      break;
    case 1:
      config.register(new SimpleAuth());
      break;
    }
    setUp();
  }

  @BeforeAll
  public static void setupKDC() throws Exception {
    testRootDir = new File("target",
        TestRMWebServicesDelegationTokens.class.getName() + "-root");
    testMiniKDC = new MiniKdc(MiniKdc.createConf(), testRootDir);
    testMiniKDC.start();
    testMiniKDC.createPrincipal(httpSpnegoKeytabFile, "HTTP/localhost",
        "client", "client2", "client3");
  }

  @Override
  public void setUp() throws Exception {
    super.setUp();
    httpSpnegoKeytabFile.deleteOnExit();
    testRootDir.deleteOnExit();
    Configuration conf = new Configuration();
    conf.set("hadoop.security.authentication", "kerberos");
    UserGroupInformation.setConfiguration(conf);
  }

  @AfterAll
  public static void shutdownKdc() {
    if (testMiniKDC != null) {
      testMiniKDC.stop();
    }
  }

  @AfterEach
  @Override
  public void tearDown() throws Exception {
    if (rm != null) {
      rm.stop();
    }
    super.tearDown();
    UserGroupInformation.setConfiguration(new Configuration());
  }

  // Simple test - try to create a delegation token via web services and check
  // to make sure we get back a valid token. Validate token using RM function
  // calls. It should only succeed with the kerberos filter
  @MethodSource("guiceConfigs")
  @ParameterizedTest
  public void testCreateDelegationToken(int run) throws Exception {
    initTestRMWebServicesDelegationTokens(run);
    rm.start();
    final String renewer = "test-renewer";
    DelegationToken token = new DelegationToken();
    token.setRenewer(renewer);
    String jsonBody = toJson(token, DelegationToken.class);
    String xmlBody =
        "<delegation-token><renewer>" + renewer
            + "</renewer></delegation-token>";
    String[] mediaTypes =
        {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML};
    Map<String, String> bodyMap = new HashMap<>();
    bodyMap.put(MediaType.APPLICATION_JSON, jsonBody);
    bodyMap.put(MediaType.APPLICATION_XML, xmlBody);
    // exercise every accept/content-type combination
    for (final String mediaType : mediaTypes) {
      final String body = bodyMap.get(mediaType);
      for (final String contentType : mediaTypes) {
        if (isKerberosAuth) {
          when(request.getAuthType()).thenReturn("Kerberos");
          verifyKerberosAuthCreate(mediaType, contentType, body, renewer);
        } else {
          verifySimpleAuthCreate(mediaType, contentType, body);
        }
      }
    }
    rm.stop();
  }

  /** Token creation under simple auth must be rejected with FORBIDDEN. */
  private void verifySimpleAuthCreate(String mediaType, String contentType,
      String body) {
    Response response =
        target().path("ws").path("v1").path("cluster")
            .path("delegation-token").queryParam("user.name", "testuser")
            .request(contentType)
            .post(Entity.entity(body, MediaType.valueOf(mediaType)), Response.class);
    assertResponseStatusCode(Response.Status.FORBIDDEN, response.getStatusInfo());
  }

  /**
   * Token creation under Kerberos: verifies the returned token decodes, has
   * the requested renewer (or an empty renewer when none was supplied) and
   * is known to the RM's secret manager.
   */
  private void verifyKerberosAuthCreate(String mType, String cType,
      String reqBody, String renUser) throws Exception {
    final String mediaType = mType;
    final String contentType = cType;
    final String body = reqBody;
    final String renewer = renUser;
    KerberosTestUtils.doAsClient(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        Principal principal1 = () -> "client@EXAMPLE.COM";
        when(request.getUserPrincipal()).thenReturn(principal1);
        Response response =
            targetWithJsonObject().path("ws").path("v1").path("cluster")
                .path("delegation-token").request(contentType)
                .post(Entity.entity(body, MediaType.valueOf(mediaType)), Response.class);
        assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
        DelegationToken tok = getDelegationTokenFromResponse(response);
        assertFalse(tok.getToken().isEmpty());
        Token<RMDelegationTokenIdentifier> token =
            new Token<RMDelegationTokenIdentifier>();
        token.decodeFromUrlString(tok.getToken());
        assertEquals(renewer, token.decodeIdentifier().getRenewer().toString());
        assertValidRMToken(tok.getToken());
        // creating a token without a renewer must also work; the renewer
        // defaults to the empty string
        DelegationToken dtoken = new DelegationToken();
        response =
            targetWithJsonObject().path("ws").path("v1").path("cluster")
                .path("delegation-token").request(contentType)
                .post(Entity.entity(toEntity(dtoken,
                    DelegationToken.class, mediaType), mediaType), Response.class);
        assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
        tok = getDelegationTokenFromResponse(response);
        assertFalse(tok.getToken().isEmpty());
        token = new Token<RMDelegationTokenIdentifier>();
        token.decodeFromUrlString(tok.getToken());
        assertEquals("", token.decodeIdentifier().getRenewer().toString());
        assertValidRMToken(tok.getToken());
        return null;
      }
    });
  }

  // Test to verify renew functionality - create a token and then try to renew
  // it. The renewer should succeed; owner and third user should fail
  @MethodSource("guiceConfigs")
  @ParameterizedTest
  public void testRenewDelegationToken(int run) throws Exception {
    initTestRMWebServicesDelegationTokens(run);
    this.client().register(new LoggingFeature());
    rm.start();
    final String renewer = "client2";
    final DelegationToken dummyToken = new DelegationToken();
    dummyToken.setRenewer(renewer);
    String[] mediaTypes =
        { MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML };
    for (final String mediaType : mediaTypes) {
      for (final String contentType : mediaTypes) {
        if (!isKerberosAuth) {
          verifySimpleAuthRenew(mediaType, contentType);
          continue;
        }
        // from here on this iteration runs under Kerberos (the simple-auth
        // case skipped above)
        when(request.getAuthType()).thenReturn("Kerberos");
        // test "client" and client2" trying to renew "client" token
        final DelegationToken responseToken =
            KerberosTestUtils.doAsClient(new Callable<DelegationToken>() {
              @Override
              public DelegationToken call() throws Exception {
                Principal principal1 = () -> "client@EXAMPLE.COM";
                when(request.getUserPrincipal()).thenReturn(principal1);
                Response response =
                    targetWithJsonObject().path("ws").path("v1").path("cluster")
                        .path("delegation-token").request(contentType)
                        .post(Entity.entity(toEntity(dummyToken,
                            DelegationToken.class, mediaType), mediaType), Response.class);
                assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
                DelegationToken tok = getDelegationTokenFromResponse(response);
                assertFalse(tok.getToken().isEmpty());
                // the owner is not the renewer, so renewal must be forbidden
                String body = generateRenewTokenBody(mediaType, tok.getToken());
                when(request.getHeader(yarnTokenHeader)).thenReturn(tok.getToken());
                response =
                    target().path("ws").path("v1").path("cluster")
                        .path("delegation-token").path("expiration").request(contentType)
                        .header(yarnTokenHeader, tok.getToken())
                        .post(Entity.entity(body, mediaType), Response.class);
                assertResponseStatusCode(Response.Status.FORBIDDEN,
                    response.getStatusInfo());
                return tok;
              }
            });
        KerberosTestUtils.doAs(renewer, new Callable<DelegationToken>() {
          @Override
          public DelegationToken call() throws Exception {
            Principal principal1 = () -> "client2@EXAMPLE.COM";
            when(request.getUserPrincipal()).thenReturn(principal1);
            when(request.getHeader(yarnTokenHeader)).thenReturn(responseToken.getToken());
            // renew twice so that we can confirm that the
            // expiration time actually changes
            long oldExpirationTime = Time.now();
            assertValidRMToken(responseToken.getToken());
            String body =
                generateRenewTokenBody(mediaType, responseToken.getToken());
            Response response =
                targetWithJsonObject().path("ws").path("v1").path("cluster")
                    .path("delegation-token").path("expiration")
                    .request(contentType).header(yarnTokenHeader, responseToken.getToken())
                    .post(Entity.entity(body, mediaType), Response.class);
            assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
            DelegationToken tok = getDelegationTokenFromResponse(response);
            String message =
                "Expiration time not as expected: old = " + oldExpirationTime
                    + "; new = " + tok.getNextExpirationTime();
            assertTrue(tok.getNextExpirationTime() > oldExpirationTime, message);
            oldExpirationTime = tok.getNextExpirationTime();
            // artificial sleep to ensure we get a different expiration time
            Thread.sleep(1000);
            response =
                targetWithJsonObject().path("ws").path("v1").path("cluster")
                    .path("delegation-token").path("expiration")
                    .request(contentType).header(yarnTokenHeader, responseToken.getToken())
                    .post(Entity.entity(body, mediaType), Response.class);
            assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
            tok = getDelegationTokenFromResponse(response);
            message =
                "Expiration time not as expected: old = " + oldExpirationTime
                    + "; new = " + tok.getNextExpirationTime();
            assertTrue(tok.getNextExpirationTime() > oldExpirationTime, message);
            return tok;
          }
        });
        // test unauthorized user renew attempt
        KerberosTestUtils.doAs("client3", new Callable<DelegationToken>() {
          @Override
          public DelegationToken call() throws Exception {
            Principal principal1 = () -> "client3@EXAMPLE.COM";
            when(request.getUserPrincipal()).thenReturn(principal1);
            String body =
                generateRenewTokenBody(mediaType, responseToken.getToken());
            Response response =
                target().path("ws").path("v1").path("cluster")
                    .path("delegation-token").path("expiration").request(contentType)
                    .header(yarnTokenHeader, responseToken.getToken())
                    .post(Entity.entity(body, mediaType), Response.class);
            assertResponseStatusCode(Response.Status.FORBIDDEN,
                response.getStatusInfo());
            return null;
          }
        });
        // test bad request - incorrect format, empty token string and random
        // token string
        KerberosTestUtils.doAsClient(new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            String token = "TEST_TOKEN_STRING";
            String body = "";
            if (mediaType.equals(MediaType.APPLICATION_JSON)) {
              DelegationToken dToken = new DelegationToken();
              dToken.setToken(token);
              body = toJson(dToken, DelegationToken.class);
            } else {
              body =
                  "<delegation-token><token>" + token
                      + "</token></delegation-token>";
            }
            // missing token header
            when(request.getHeader(yarnTokenHeader)).thenReturn(null);
            Response response =
                target().path("ws").path("v1").path("cluster")
                    .path("delegation-token").path("expiration").request()
                    .post(Entity.entity(body, MediaType.valueOf(mediaType)), Response.class);
            assertResponseStatusCode(Response.Status.BAD_REQUEST,
                response.getStatusInfo());
            return null;
          }
        });
      }
    }
    rm.stop();
  }

  /** Renewal under simple auth must be rejected with FORBIDDEN. */
  private void verifySimpleAuthRenew(String mediaType, String contentType) throws Exception {
    // contents of body don't matter because the request processing shouldn't
    // get that far
    String body;
    if (mediaType.equals(MediaType.APPLICATION_JSON)) {
      DelegationToken delegationToken = new DelegationToken();
      delegationToken.setToken("test-123");
      body = toJson(delegationToken, DelegationToken.class);
    } else {
      body = "<delegation-token><xml>abcd</xml></delegation-token>";
    }
    Response response =
        target().path("ws").path("v1").path("cluster")
            .path("delegation-token").queryParam("user.name", "testuser")
            .request(contentType)
            .post(Entity.entity(body, mediaType), Response.class);
    assertResponseStatusCode(Response.Status.FORBIDDEN, response.getStatusInfo());
  }

  // Test to verify cancel functionality - create a token and then try to cancel
  // it. The owner and renewer should succeed; third user should fail
  @MethodSource("guiceConfigs")
  @ParameterizedTest
  public void testCancelDelegationToken(int run) throws Exception {
    initTestRMWebServicesDelegationTokens(run);
    rm.start();
    if (!isKerberosAuth) {
      verifySimpleAuthCancel();
      return;
    }
    final DelegationToken dtoken = new DelegationToken();
    String renewer = "client2";
    dtoken.setRenewer(renewer);
    String[] mediaTypes =
        {MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML};
    when(request.getAuthType()).thenReturn("Kerberos");
    for (final String mediaType : mediaTypes) {
      for (final String contentType : mediaTypes) {
        // owner should be able to cancel delegation token
        KerberosTestUtils.doAsClient(new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            Principal principal1 = () -> "client@EXAMPLE.COM";
            when(request.getUserPrincipal()).thenReturn(principal1);
            Response response =
                targetWithJsonObject().path("ws").path("v1").path("cluster")
                    .path("delegation-token").request(contentType)
                    .post(Entity.entity(toEntity(dtoken, DelegationToken.class, mediaType),
                        mediaType), Response.class);
            assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
            DelegationToken tok = getDelegationTokenFromResponse(response);
            when(request.getHeader(yarnTokenHeader)).thenReturn(tok.getToken());
            response =
                targetWithJsonObject().path("ws").path("v1").path("cluster")
                    .path("delegation-token").request(contentType)
                    .header(yarnTokenHeader, tok.getToken())
                    .delete(Response.class);
            assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
            assertTokenCancelled(tok.getToken());
            return null;
          }
        });
        // renewer should be able to cancel token
        final DelegationToken tmpToken =
            KerberosTestUtils.doAsClient(new Callable<DelegationToken>() {
              @Override
              public DelegationToken call() throws Exception {
                Principal principal1 = () -> "client@EXAMPLE.COM";
                when(request.getUserPrincipal()).thenReturn(principal1);
                Response response =
                    targetWithJsonObject().path("ws").path("v1").path("cluster").
                        path("delegation-token").request(contentType).
                        post(Entity.entity(toEntity(dtoken, DelegationToken.class, mediaType),
                        mediaType), Response.class);
                assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
                DelegationToken tok = getDelegationTokenFromResponse(response);
                return tok;
              }
            });
        KerberosTestUtils.doAs(renewer, new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            Principal principal1 = () -> "client2@EXAMPLE.COM";
            when(request.getUserPrincipal()).thenReturn(principal1);
            when(request.getHeader(yarnTokenHeader)).thenReturn(tmpToken.getToken());
            Response response =
                targetWithJsonObject().path("ws").path("v1").path("cluster")
                    .path("delegation-token").request()
                    .header(yarnTokenHeader, tmpToken.getToken())
                    .accept(contentType).delete(Response.class);
            assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
            assertTokenCancelled(tmpToken.getToken());
            return null;
          }
        });
        // third user should not be able to cancel token
        final DelegationToken tmpToken2 =
            KerberosTestUtils.doAsClient(new Callable<DelegationToken>() {
              @Override
              public DelegationToken call() throws Exception {
                Principal principal1 = () -> "client@EXAMPLE.COM";
                when(request.getUserPrincipal()).thenReturn(principal1);
                Response response =
                    targetWithJsonObject().path("ws").path("v1").path("cluster").
                        path("delegation-token").request(contentType).
                        post(Entity.entity(toEntity(dtoken, DelegationToken.class, mediaType),
                        mediaType), Response.class);
                assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
                DelegationToken tok = getDelegationTokenFromResponse(response);
                return tok;
              }
            });
        KerberosTestUtils.doAs("client3", new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            Principal principal1 = () -> "client3@EXAMPLE.COM";
            when(request.getUserPrincipal()).thenReturn(principal1);
            when(request.getHeader(yarnTokenHeader)).thenReturn(tmpToken2.getToken());
            Response response =
                targetWithJsonObject().path("ws").path("v1").path("cluster").
                    path("delegation-token").request().
                    header(yarnTokenHeader, tmpToken2.getToken()).
                    accept(contentType).delete(Response.class);
            assertResponseStatusCode(Response.Status.FORBIDDEN,
                response.getStatusInfo());
            assertValidRMToken(tmpToken2.getToken());
            return null;
          }
        });
        testCancelTokenBadRequests(mediaType, contentType);
      }
    }
    rm.stop();
  }

  /**
   * Cancellation bad-request cases: an invalid header value, a missing
   * header, and an already-cancelled token must all yield BAD_REQUEST.
   */
  private void testCancelTokenBadRequests(String mType, String cType)
      throws Exception {
    final String mediaType = mType;
    final String contentType = cType;
    final DelegationToken dtoken = new DelegationToken();
    String renewer = "client2";
    dtoken.setRenewer(renewer);
    // bad request(invalid header value)
    KerberosTestUtils.doAsClient(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        Principal principal1 = () -> "client@EXAMPLE.COM";
        when(request.getUserPrincipal()).thenReturn(principal1);
        when(request.getHeader(yarnTokenHeader)).thenReturn("random-string");
        Response response =
            targetWithJsonObject().path("ws").path("v1").path("cluster")
                .path("delegation-token").request(contentType)
                .header(yarnTokenHeader, "random-string")
                .delete(Response.class);
        assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo());
        return null;
      }
    });
    // bad request(missing header)
    KerberosTestUtils.doAsClient(new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        Principal principal1 = () -> "client@EXAMPLE.COM";
        when(request.getUserPrincipal()).thenReturn(principal1);
        Response response =
            targetWithJsonObject().path("ws").path("v1").path("cluster")
                .path("delegation-token").request(contentType)
                .delete(Response.class);
        assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo());
        return null;
      }
    });
    // bad request(cancelled token)
    final DelegationToken tmpToken =
        KerberosTestUtils.doAsClient(new Callable<DelegationToken>() {
          @Override
          public DelegationToken call() throws Exception {
            Principal principal1 = () -> "client@EXAMPLE.COM";
            when(request.getUserPrincipal()).thenReturn(principal1);
            Response response =
                targetWithJsonObject().path("ws").path("v1").path("cluster")
                    .path("delegation-token").request(contentType)
                    .post(Entity.entity(toEntity(dtoken, DelegationToken.class, mediaType),
                        mediaType), Response.class);
            assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
            DelegationToken tok = getDelegationTokenFromResponse(response);
            return tok;
          }
        });
    KerberosTestUtils.doAs(renewer, new Callable<Void>() {
      @Override
      public Void call() throws Exception {
        Principal principal1 = () -> "client2@EXAMPLE.COM";
        when(request.getHeader(yarnTokenHeader)).thenReturn(tmpToken.getToken());
        when(request.getUserPrincipal()).thenReturn(principal1);
        Response response =
            targetWithJsonObject().path("ws").path("v1").path("cluster")
                .path("delegation-token").request(contentType)
                .header(yarnTokenHeader, tmpToken.getToken())
                .delete(Response.class);
        assertResponseStatusCode(Response.Status.OK, response.getStatusInfo());
        // second cancellation of the same token must fail
        response =
            targetWithJsonObject().path("ws").path("v1").path("cluster")
                .path("delegation-token").request(contentType)
                .header(yarnTokenHeader, tmpToken.getToken())
                .delete(Response.class);
        assertResponseStatusCode(Response.Status.BAD_REQUEST, response.getStatusInfo());
        return null;
      }
    });
  }

  /** Cancellation under simple auth must be rejected with FORBIDDEN. */
  private void verifySimpleAuthCancel() {
    // contents of header don't matter; request should never get that far
    Response response =
        target().path("ws").path("v1").path("cluster")
            .path("delegation-token").queryParam("user.name", "testuser")
            .request()
            .header(RMWebServices.DELEGATION_TOKEN_HEADER, "random")
            .delete(Response.class);
    assertResponseStatusCode(Response.Status.FORBIDDEN, response.getStatusInfo());
  }

  /** Extracts a DelegationToken from a JSON or XML response body. */
  private DelegationToken getDelegationTokenFromResponse(Response response)
      throws IOException, ParserConfigurationException, SAXException,
      JSONException {
    if (response.getMediaType().toString().contains(MediaType.APPLICATION_JSON)) {
      return getDelegationTokenFromJson(
          response.readEntity(JSONObject.class).getJSONObject("delegation-token"));
    }
    return getDelegationTokenFromXML(response.readEntity(String.class));
  }

  /**
   * Parses a {@code <delegation-token>} XML document; it is expected to carry
   * either a token string or an expiration-time, never both.
   */
  public static DelegationToken getDelegationTokenFromXML(String tokenXML)
      throws IOException, ParserConfigurationException, SAXException {
    DocumentBuilderFactory dbf = XMLUtils.newSecureDocumentBuilderFactory();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(tokenXML));
    Document dom = db.parse(is);
    NodeList nodes = dom.getElementsByTagName("delegation-token");
    assertEquals(1, nodes.getLength(), "incorrect number of elements");
    Element element = (Element) nodes.item(0);
    DelegationToken ret = new DelegationToken();
    String token = WebServicesTestUtils.getXmlString(element, "token");
    if (token != null) {
      ret.setToken(token);
    } else {
      long expiration =
          WebServicesTestUtils.getXmlLong(element, "expiration-time");
      ret.setNextExpirationTime(expiration);
    }
    return ret;
  }

  /** JSON counterpart of {@link #getDelegationTokenFromXML(String)}. */
  public static DelegationToken getDelegationTokenFromJson(JSONObject json)
      throws JSONException {
    DelegationToken ret = new DelegationToken();
    if (json.has("token")) {
      ret.setToken(json.getString("token"));
    } else if (json.has("expiration-time")) {
      ret.setNextExpirationTime(json.getLong("expiration-time"));
    }
    return ret;
  }

  /** Asserts that the encoded token verifies against the RM's secret manager. */
  private void assertValidRMToken(String encodedToken) throws IOException {
    Token<RMDelegationTokenIdentifier> realToken =
        new Token<RMDelegationTokenIdentifier>();
    realToken.decodeFromUrlString(encodedToken);
    RMDelegationTokenIdentifier ident = rm.getRMContext()
        .getRMDelegationTokenSecretManager().decodeTokenIdentifier(realToken);
    rm.getRMContext().getRMDelegationTokenSecretManager()
        .verifyToken(ident, realToken.getPassword());
    assertTrue(rm.getRMContext().getRMDelegationTokenSecretManager()
        .getAllTokens().containsKey(ident));
  }

  /** Asserts that the encoded token is no longer accepted by the RM. */
  private void assertTokenCancelled(String encodedToken) throws Exception {
    Token<RMDelegationTokenIdentifier> realToken =
        new Token<RMDelegationTokenIdentifier>();
    realToken.decodeFromUrlString(encodedToken);
    RMDelegationTokenIdentifier ident = rm.getRMContext()
        .getRMDelegationTokenSecretManager().decodeTokenIdentifier(realToken);
    boolean exceptionCaught = false;
    try {
      rm.getRMContext().getRMDelegationTokenSecretManager()
          .verifyToken(ident, realToken.getPassword());
    } catch (InvalidToken it) {
      exceptionCaught = true;
    }
    assertTrue(exceptionCaught, "InvalidToken exception not thrown");
    assertFalse(rm.getRMContext().getRMDelegationTokenSecretManager()
        .getAllTokens().containsKey(ident));
  }

  /** Builds a renewal request body carrying the token, as JSON or XML. */
  private static String generateRenewTokenBody(String mediaType, String token)
      throws Exception {
    String body;
    if (mediaType.contains(MediaType.APPLICATION_JSON)) {
      DelegationToken dToken = new DelegationToken();
      dToken.setToken(token);
      body = toJson(dToken, DelegationToken.class);
    } else {
      body =
          "<delegation-token><token>" + token + "</token></delegation-token>";
    }
    return body;
  }
}
|
google/guava | 35,968 | guava-tests/test/com/google/common/collect/MapMakerInternalMapTest.java | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.MapMakerInternalMap.DRAIN_THRESHOLD;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Equivalence;
import com.google.common.collect.MapMakerInternalMap.InternalEntry;
import com.google.common.collect.MapMakerInternalMap.Segment;
import com.google.common.collect.MapMakerInternalMap.Strength;
import com.google.common.collect.MapMakerInternalMap.WeakValueEntry;
import com.google.common.collect.MapMakerInternalMap.WeakValueReference;
import com.google.common.testing.NullPointerTester;
import java.lang.ref.Reference;
import java.util.concurrent.atomic.AtomicReferenceArray;
import junit.framework.TestCase;
import org.jspecify.annotations.NullUnmarked;
/**
 * Unit tests for the internals of {@link MapMakerInternalMap}: construction defaults,
 * per-segment operations (get/put/replace/remove), table expansion, chain surgery, and
 * reference-queue draining for weak keys and weak values.
 *
 * @author Charles Fry
 */
@SuppressWarnings("deprecation") // many tests of deprecated methods
@NullUnmarked
public class MapMakerInternalMapTest extends TestCase {
  // Size used by the read-drain tests; large enough to cross the drain threshold.
  static final int SMALL_MAX_SIZE = DRAIN_THRESHOLD * 5;
  // Builds the internal map implementation directly from a configured MapMaker.
  private static <K, V>
      MapMakerInternalMap<K, V, ? extends InternalEntry<K, V, ?>, ? extends Segment<K, V, ?, ?>>
          makeMap(MapMaker maker) {
    return MapMakerInternalMap.create(maker);
  }
  // Returns a MapMaker flagged to use the custom map implementation under test.
  private static MapMaker createMapMaker() {
    MapMaker maker = new MapMaker();
    maker.useCustomMap = true;
    return maker;
  }
  // constructor tests
  // Default maker: strong keys/values, default equivalences, concurrency level 4.
  public void testDefaults() {
    MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker());
    assertSame(Strength.STRONG, map.keyStrength());
    assertSame(Strength.STRONG, map.valueStrength());
    assertSame(map.keyStrength().defaultEquivalence(), map.keyEquivalence);
    assertSame(map.valueStrength().defaultEquivalence(), map.valueEquivalence());
    assertThat(map.entryHelper)
        .isInstanceOf(MapMakerInternalMap.StrongKeyStrongValueEntry.Helper.class);
    assertEquals(4, map.concurrencyLevel);
    // concurrency level
    assertThat(map.segments).hasLength(4);
    // initial capacity / concurrency level
    assertEquals(16 / map.segments.length, map.segments[0].table.length());
  }
  // A custom key equivalence must be used verbatim; value equivalence stays default.
  public void testSetKeyEquivalence() {
    Equivalence<Object> testEquivalence =
        new Equivalence<Object>() {
          @Override
          protected boolean doEquivalent(Object a, Object b) {
            return false;
          }
          @Override
          protected int doHash(Object t) {
            return 0;
          }
        };
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().keyEquivalence(testEquivalence));
    assertSame(testEquivalence, map.keyEquivalence);
    assertSame(map.valueStrength().defaultEquivalence(), map.valueEquivalence());
  }
  // Requested concurrency level is rounded up to the next power of two segments.
  public void testSetConcurrencyLevel() {
    // round up to the nearest power of two
    checkConcurrencyLevel(1, 1);
    checkConcurrencyLevel(2, 2);
    checkConcurrencyLevel(3, 4);
    checkConcurrencyLevel(4, 4);
    checkConcurrencyLevel(5, 8);
    checkConcurrencyLevel(6, 8);
    checkConcurrencyLevel(7, 8);
    checkConcurrencyLevel(8, 8);
  }
  // Asserts the segment count produced for a given requested concurrency level.
  private static void checkConcurrencyLevel(int concurrencyLevel, int segmentCount) {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(concurrencyLevel));
    assertThat(map.segments).hasLength(segmentCount);
  }
  // Initial capacity is split across segments, each rounded up to a power of two.
  public void testSetInitialCapacity() {
    // share capacity over each segment, then round up to the nearest power of two
    checkInitialCapacity(1, 0, 1);
    checkInitialCapacity(1, 1, 1);
    checkInitialCapacity(1, 2, 2);
    checkInitialCapacity(1, 3, 4);
    checkInitialCapacity(1, 4, 4);
    checkInitialCapacity(1, 5, 8);
    checkInitialCapacity(1, 6, 8);
    checkInitialCapacity(1, 7, 8);
    checkInitialCapacity(1, 8, 8);
    checkInitialCapacity(2, 0, 1);
    checkInitialCapacity(2, 1, 1);
    checkInitialCapacity(2, 2, 1);
    checkInitialCapacity(2, 3, 2);
    checkInitialCapacity(2, 4, 2);
    checkInitialCapacity(2, 5, 4);
    checkInitialCapacity(2, 6, 4);
    checkInitialCapacity(2, 7, 4);
    checkInitialCapacity(2, 8, 4);
    checkInitialCapacity(4, 0, 1);
    checkInitialCapacity(4, 1, 1);
    checkInitialCapacity(4, 2, 1);
    checkInitialCapacity(4, 3, 1);
    checkInitialCapacity(4, 4, 1);
    checkInitialCapacity(4, 5, 2);
    checkInitialCapacity(4, 6, 2);
    checkInitialCapacity(4, 7, 2);
    checkInitialCapacity(4, 8, 2);
  }
  // Asserts every segment's table size for the given concurrency level / capacity combo.
  private static void checkInitialCapacity(
      int concurrencyLevel, int initialCapacity, int segmentSize) {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(
            createMapMaker().concurrencyLevel(concurrencyLevel).initialCapacity(initialCapacity));
    for (int i = 0; i < map.segments.length; i++) {
      assertEquals(segmentSize, map.segments[i].table.length());
    }
  }
  // weakKeys() selects the weak-key/strong-value entry helper.
  public void testSetWeakKeys() {
    MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker().weakKeys());
    checkStrength(map, Strength.WEAK, Strength.STRONG);
    assertThat(map.entryHelper)
        .isInstanceOf(MapMakerInternalMap.WeakKeyStrongValueEntry.Helper.class);
  }
  // weakValues() selects the strong-key/weak-value entry helper.
  public void testSetWeakValues() {
    MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker().weakValues());
    checkStrength(map, Strength.STRONG, Strength.WEAK);
    assertThat(map.entryHelper)
        .isInstanceOf(MapMakerInternalMap.StrongKeyWeakValueEntry.Helper.class);
  }
  // Asserts key/value strengths and that each equivalence matches its strength's default.
  private static void checkStrength(
      MapMakerInternalMap<Object, Object, ?, ?> map, Strength keyStrength, Strength valueStrength) {
    assertSame(keyStrength, map.keyStrength());
    assertSame(valueStrength, map.valueStrength());
    assertSame(keyStrength.defaultEquivalence(), map.keyEquivalence);
    assertSame(valueStrength.defaultEquivalence(), map.valueEquivalence());
  }
  // Segment core tests
  // New entries carry key, hash, next-pointer and an attached weak value reference.
  public void testNewEntry() {
    for (MapMaker maker : allWeakValueStrengthMakers()) {
      MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker);
      Segment<Object, Object, ?, ?> segment = map.segments[0];
      Object keyOne = new Object();
      Object valueOne = new Object();
      int hashOne = map.hash(keyOne);
      InternalEntry<Object, Object, ?> entryOne = segment.newEntryForTesting(keyOne, hashOne, null);
      WeakValueReference<Object, Object, ?> valueRefOne =
          segment.newWeakValueReferenceForTesting(entryOne, valueOne);
      assertSame(valueOne, valueRefOne.get());
      segment.setWeakValueReferenceForTesting(entryOne, valueRefOne);
      assertSame(keyOne, entryOne.getKey());
      assertEquals(hashOne, entryOne.getHash());
      assertNull(entryOne.getNext());
      assertSame(valueRefOne, segment.getWeakValueReferenceForTesting(entryOne));
      Object keyTwo = new Object();
      Object valueTwo = new Object();
      int hashTwo = map.hash(keyTwo);
      // A second entry chained in front of the first preserves the next-pointer.
      InternalEntry<Object, Object, ?> entryTwo =
          segment.newEntryForTesting(keyTwo, hashTwo, entryOne);
      WeakValueReference<Object, Object, ?> valueRefTwo =
          segment.newWeakValueReferenceForTesting(entryTwo, valueTwo);
      assertSame(valueTwo, valueRefTwo.get());
      segment.setWeakValueReferenceForTesting(entryTwo, valueRefTwo);
      assertSame(keyTwo, entryTwo.getKey());
      assertEquals(hashTwo, entryTwo.getHash());
      assertSame(entryOne, entryTwo.getNext());
      assertSame(valueRefTwo, segment.getWeakValueReferenceForTesting(entryTwo));
    }
  }
  // Copying an entry preserves key, hash, value, and links to the supplied next entry.
  public void testCopyEntry() {
    for (MapMaker maker : allWeakValueStrengthMakers()) {
      MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker);
      Segment<Object, Object, ?, ?> segment = map.segments[0];
      Object keyOne = new Object();
      Object valueOne = new Object();
      int hashOne = map.hash(keyOne);
      InternalEntry<Object, Object, ?> entryOne = segment.newEntryForTesting(keyOne, hashOne, null);
      segment.setValueForTesting(entryOne, valueOne);
      Object keyTwo = new Object();
      Object valueTwo = new Object();
      int hashTwo = map.hash(keyTwo);
      InternalEntry<Object, Object, ?> entryTwo = segment.newEntryForTesting(keyTwo, hashTwo, null);
      segment.setValueForTesting(entryTwo, valueTwo);
      InternalEntry<Object, Object, ?> copyOne = segment.copyForTesting(entryOne, null);
      assertSame(keyOne, entryOne.getKey());
      assertEquals(hashOne, entryOne.getHash());
      assertNull(entryOne.getNext());
      assertSame(valueOne, copyOne.getValue());
      InternalEntry<Object, Object, ?> copyTwo = segment.copyForTesting(entryTwo, copyOne);
      assertSame(keyTwo, copyTwo.getKey());
      assertEquals(hashTwo, copyTwo.getHash());
      assertSame(copyOne, copyTwo.getNext());
      assertSame(valueTwo, copyTwo.getValue());
    }
  }
  // get/containsKey/containsValue honor segment.count, skip null keys, walk collisions.
  public void testSegmentGetAndContains() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    // TODO(fry): check recency ordering
    Object key = new Object();
    int hash = map.hash(key);
    Object value = new Object();
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    int index = hash & (table.length() - 1);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    segment.setValueForTesting(entry, value);
    assertNull(segment.get(key, hash));
    // count == 0
    segment.setTableEntryForTesting(index, entry);
    assertNull(segment.get(key, hash));
    assertFalse(segment.containsKey(key, hash));
    assertFalse(segment.containsValue(value));
    // count == 1
    segment.count++;
    assertSame(value, segment.get(key, hash));
    assertTrue(segment.containsKey(key, hash));
    assertTrue(segment.containsValue(value));
    // don't see absent values now that count > 0
    assertNull(segment.get(new Object(), hash));
    // null key
    InternalEntry<Object, Object, ?> nullEntry = segment.newEntryForTesting(null, hash, entry);
    Object nullValue = new Object();
    WeakValueReference<Object, Object, ?> nullValueRef =
        segment.newWeakValueReferenceForTesting(nullEntry, nullValue);
    segment.setWeakValueReferenceForTesting(nullEntry, nullValueRef);
    segment.setTableEntryForTesting(index, nullEntry);
    // skip the null key
    assertSame(value, segment.get(key, hash));
    assertTrue(segment.containsKey(key, hash));
    assertTrue(segment.containsValue(value));
    assertFalse(segment.containsValue(nullValue));
    // hash collision
    InternalEntry<Object, Object, ?> dummyEntry =
        segment.newEntryForTesting(new Object(), hash, entry);
    Object dummyValue = new Object();
    WeakValueReference<Object, Object, ?> dummyValueRef =
        segment.newWeakValueReferenceForTesting(dummyEntry, dummyValue);
    segment.setWeakValueReferenceForTesting(dummyEntry, dummyValueRef);
    segment.setTableEntryForTesting(index, dummyEntry);
    assertSame(value, segment.get(key, hash));
    assertTrue(segment.containsKey(key, hash));
    assertTrue(segment.containsValue(value));
    assertTrue(segment.containsValue(dummyValue));
    // key collision
    dummyEntry = segment.newEntryForTesting(key, hash, entry);
    dummyValue = new Object();
    dummyValueRef = segment.newWeakValueReferenceForTesting(dummyEntry, dummyValue);
    segment.setWeakValueReferenceForTesting(dummyEntry, dummyValueRef);
    segment.setTableEntryForTesting(index, dummyEntry);
    // returns the most recent entry
    assertSame(dummyValue, segment.get(key, hash));
    assertTrue(segment.containsKey(key, hash));
    assertTrue(segment.containsValue(value));
    assertTrue(segment.containsValue(dummyValue));
  }
  // replace(key, hash, oldValue, newValue): absent entry, matching value, mismatched
  // value, and a collected (cleared) weak value.
  public void testSegmentReplaceValue() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    // TODO(fry): check recency ordering
    Object key = new Object();
    int hash = map.hash(key);
    Object oldValue = new Object();
    Object newValue = new Object();
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    int index = hash & (table.length() - 1);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    WeakValueReference<Object, Object, ?> oldValueRef =
        segment.newWeakValueReferenceForTesting(entry, oldValue);
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    // no entry
    assertFalse(segment.replace(key, hash, oldValue, newValue));
    assertEquals(0, segment.count);
    // same value
    segment.setTableEntryForTesting(index, entry);
    segment.count++;
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    assertTrue(segment.replace(key, hash, oldValue, newValue));
    assertEquals(1, segment.count);
    assertSame(newValue, segment.get(key, hash));
    // different value
    assertFalse(segment.replace(key, hash, oldValue, newValue));
    assertEquals(1, segment.count);
    assertSame(newValue, segment.get(key, hash));
    // cleared
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    oldValueRef.clear();
    assertFalse(segment.replace(key, hash, oldValue, newValue));
    assertEquals(0, segment.count);
    assertNull(segment.get(key, hash));
  }
  // replace(key, hash, newValue): absent entry, present entry, and cleared weak value.
  public void testSegmentReplace() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    // TODO(fry): check recency ordering
    Object key = new Object();
    int hash = map.hash(key);
    Object oldValue = new Object();
    Object newValue = new Object();
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    int index = hash & (table.length() - 1);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    WeakValueReference<Object, Object, ?> oldValueRef =
        segment.newWeakValueReferenceForTesting(entry, oldValue);
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    // no entry
    assertNull(segment.replace(key, hash, newValue));
    assertEquals(0, segment.count);
    // same key
    segment.setTableEntryForTesting(index, entry);
    segment.count++;
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    assertSame(oldValue, segment.replace(key, hash, newValue));
    assertEquals(1, segment.count);
    assertSame(newValue, segment.get(key, hash));
    // cleared
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    oldValueRef.clear();
    assertNull(segment.replace(key, hash, newValue));
    assertEquals(0, segment.count);
    assertNull(segment.get(key, hash));
  }
  // put with onlyIfAbsent=false: insert, overwrite, and re-insert over a cleared value.
  public void testSegmentPut() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    // TODO(fry): check recency ordering
    Object key = new Object();
    int hash = map.hash(key);
    Object oldValue = new Object();
    Object newValue = new Object();
    // no entry
    assertEquals(0, segment.count);
    assertNull(segment.put(key, hash, oldValue, false));
    assertEquals(1, segment.count);
    // same key
    assertSame(oldValue, segment.put(key, hash, newValue, false));
    assertEquals(1, segment.count);
    assertSame(newValue, segment.get(key, hash));
    // cleared
    InternalEntry<Object, Object, ?> entry = segment.getEntry(key, hash);
    WeakValueReference<Object, Object, ?> oldValueRef =
        segment.newWeakValueReferenceForTesting(entry, oldValue);
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    assertSame(oldValue, segment.get(key, hash));
    oldValueRef.clear();
    assertNull(segment.put(key, hash, newValue, false));
    assertEquals(1, segment.count);
    assertSame(newValue, segment.get(key, hash));
  }
  // put with onlyIfAbsent=true: insert, keep existing value, and replace a cleared value.
  public void testSegmentPutIfAbsent() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    // TODO(fry): check recency ordering
    Object key = new Object();
    int hash = map.hash(key);
    Object oldValue = new Object();
    Object newValue = new Object();
    // no entry
    assertEquals(0, segment.count);
    assertNull(segment.put(key, hash, oldValue, true));
    assertEquals(1, segment.count);
    // same key
    assertSame(oldValue, segment.put(key, hash, newValue, true));
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    // cleared
    InternalEntry<Object, Object, ?> entry = segment.getEntry(key, hash);
    WeakValueReference<Object, Object, ?> oldValueRef =
        segment.newWeakValueReferenceForTesting(entry, oldValue);
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    assertSame(oldValue, segment.get(key, hash));
    oldValueRef.clear();
    assertNull(segment.put(key, hash, newValue, true));
    assertEquals(1, segment.count);
    assertSame(newValue, segment.get(key, hash));
  }
  // Repeated puts must grow the table so it always outpaces the element count.
  public void testSegmentPut_expand() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    assertEquals(1, segment.table.length());
    int count = 1024;
    for (int i = 0; i < count; i++) {
      Object key = new Object();
      Object value = new Object();
      int hash = map.hash(key);
      assertNull(segment.put(key, hash, value, false));
      assertTrue(segment.table.length() > i);
    }
  }
  // remove(key, hash): absent entry, present entry, and cleared weak value.
  public void testSegmentRemove() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    Object key = new Object();
    int hash = map.hash(key);
    Object oldValue = new Object();
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    int index = hash & (table.length() - 1);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    WeakValueReference<Object, Object, ?> oldValueRef =
        segment.newWeakValueReferenceForTesting(entry, oldValue);
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    // no entry
    assertEquals(0, segment.count);
    assertNull(segment.remove(key, hash));
    assertEquals(0, segment.count);
    // same key
    segment.setTableEntryForTesting(index, entry);
    segment.count++;
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    assertSame(oldValue, segment.remove(key, hash));
    assertEquals(0, segment.count);
    assertNull(segment.get(key, hash));
    // cleared
    segment.setTableEntryForTesting(index, entry);
    segment.count++;
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    oldValueRef.clear();
    assertNull(segment.remove(key, hash));
    assertEquals(0, segment.count);
    assertNull(segment.get(key, hash));
  }
  // remove(key, hash, value): absent, matching value, mismatched value, cleared value.
  public void testSegmentRemoveValue() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    Object key = new Object();
    int hash = map.hash(key);
    Object oldValue = new Object();
    Object newValue = new Object();
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    int index = hash & (table.length() - 1);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    WeakValueReference<Object, Object, ?> oldValueRef =
        segment.newWeakValueReferenceForTesting(entry, oldValue);
    segment.setWeakValueReferenceForTesting(entry, oldValueRef);
    // no entry
    assertEquals(0, segment.count);
    assertNull(segment.remove(key, hash));
    assertEquals(0, segment.count);
    // same value
    segment.setTableEntryForTesting(index, entry);
    segment.count++;
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    assertTrue(segment.remove(key, hash, oldValue));
    assertEquals(0, segment.count);
    assertNull(segment.get(key, hash));
    // different value
    segment.setTableEntryForTesting(index, entry);
    segment.count++;
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    assertFalse(segment.remove(key, hash, newValue));
    assertEquals(1, segment.count);
    assertSame(oldValue, segment.get(key, hash));
    // cleared
    assertSame(oldValue, segment.get(key, hash));
    oldValueRef.clear();
    assertFalse(segment.remove(key, hash, oldValue));
    assertEquals(0, segment.count);
    assertNull(segment.get(key, hash));
  }
  // Expanding a manually-built single-bucket chain preserves all live entries.
  @SuppressWarnings("GuardedBy")
  public void testExpand() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    assertEquals(1, segment.table.length());
    // manually add elements to avoid expansion
    int originalCount = 1024;
    InternalEntry<Object, Object, ?> entry = null;
    for (int i = 0; i < originalCount; i++) {
      Object key = new Object();
      Object value = new Object();
      int hash = map.hash(key);
      // chain all entries together as we only have a single bucket
      entry = segment.newEntryForTesting(key, hash, entry);
      segment.setValueForTesting(entry, value);
    }
    segment.setTableEntryForTesting(0, entry);
    segment.count = originalCount;
    ImmutableMap<Object, Object> originalMap = ImmutableMap.copyOf(map);
    assertEquals(originalCount, originalMap.size());
    assertEquals(originalMap, map);
    // Doubling the table repeatedly must never lose or duplicate entries.
    for (int i = 1; i <= originalCount * 2; i *= 2) {
      if (i > 1) {
        // TODO(b/145386688): This access should be guarded by 'segment', which is not currently
        // held
        segment.expand();
      }
      assertEquals(i, segment.table.length());
      assertEquals(originalCount, countLiveEntries(map));
      assertEquals(originalCount, segment.count);
      assertEquals(originalMap, map);
    }
  }
  // removeFromChainForTesting: removing the sole entry, the head, the middle, and the
  // tail (the latter copies the remaining prefix in reverse order).
  public void testRemoveFromChain() {
    MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker().concurrencyLevel(1));
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    // create 3 objects and chain them together
    Object keyOne = new Object();
    Object valueOne = new Object();
    int hashOne = map.hash(keyOne);
    InternalEntry<Object, Object, ?> entryOne = segment.newEntryForTesting(keyOne, hashOne, null);
    segment.setValueForTesting(entryOne, valueOne);
    Object keyTwo = new Object();
    Object valueTwo = new Object();
    int hashTwo = map.hash(keyTwo);
    InternalEntry<Object, Object, ?> entryTwo =
        segment.newEntryForTesting(keyTwo, hashTwo, entryOne);
    segment.setValueForTesting(entryTwo, valueTwo);
    Object keyThree = new Object();
    Object valueThree = new Object();
    int hashThree = map.hash(keyThree);
    InternalEntry<Object, Object, ?> entryThree =
        segment.newEntryForTesting(keyThree, hashThree, entryTwo);
    segment.setValueForTesting(entryThree, valueThree);
    // alone
    assertNull(segment.removeFromChainForTesting(entryOne, entryOne));
    // head
    assertSame(entryOne, segment.removeFromChainForTesting(entryTwo, entryTwo));
    // middle
    InternalEntry<Object, Object, ?> newFirst =
        segment.removeFromChainForTesting(entryThree, entryTwo);
    assertSame(keyThree, newFirst.getKey());
    assertSame(valueThree, newFirst.getValue());
    assertEquals(hashThree, newFirst.getHash());
    assertSame(entryOne, newFirst.getNext());
    // tail (remaining entries are copied in reverse order)
    newFirst = segment.removeFromChainForTesting(entryThree, entryOne);
    assertSame(keyTwo, newFirst.getKey());
    assertSame(valueTwo, newFirst.getValue());
    assertEquals(hashTwo, newFirst.getHash());
    newFirst = newFirst.getNext();
    assertSame(keyThree, newFirst.getKey());
    assertSame(valueThree, newFirst.getValue());
    assertEquals(hashThree, newFirst.getHash());
    assertNull(newFirst.getNext());
  }
  // Expansion over a chain containing null keys/values drops the dead entries (sloppily,
  // so count may stay above the live count) while preserving live contents.
  @SuppressWarnings("GuardedBy")
  public void testExpand_cleanup() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    assertEquals(1, segment.table.length());
    // manually add elements to avoid expansion
    // 1/3 null keys, 1/3 null values
    int originalCount = 1024;
    InternalEntry<Object, Object, ?> entry = null;
    for (int i = 0; i < originalCount; i++) {
      Object key = new Object();
      Object value = (i % 3 == 0) ? null : new Object();
      int hash = map.hash(key);
      if (i % 3 == 1) {
        key = null;
      }
      // chain all entries together as we only have a single bucket
      entry = segment.newEntryForTesting(key, hash, entry);
      segment.setValueForTesting(entry, value);
    }
    segment.setTableEntryForTesting(0, entry);
    segment.count = originalCount;
    int liveCount = originalCount / 3;
    assertEquals(1, segment.table.length());
    assertEquals(liveCount, countLiveEntries(map));
    ImmutableMap<Object, Object> originalMap = ImmutableMap.copyOf(map);
    assertEquals(liveCount, originalMap.size());
    // can't compare map contents until cleanup occurs
    for (int i = 1; i <= originalCount * 2; i *= 2) {
      if (i > 1) {
        // TODO(b/145386688): This access should be guarded by 'segment', which is not currently
        // held
        segment.expand();
      }
      assertEquals(i, segment.table.length());
      assertEquals(liveCount, countLiveEntries(map));
      // expansion cleanup is sloppy, with a goal of avoiding unnecessary copies
      assertTrue(segment.count >= liveCount);
      assertTrue(segment.count <= originalCount);
      assertEquals(originalMap, ImmutableMap.copyOf(map));
    }
  }
  // Walks every chain in every segment and counts entries the map still considers live.
  private static <K, V> int countLiveEntries(MapMakerInternalMap<K, V, ?, ?> map) {
    int result = 0;
    for (Segment<K, V, ?, ?> segment : map.segments) {
      AtomicReferenceArray<? extends InternalEntry<K, V, ?>> table = segment.table;
      for (int i = 0; i < table.length(); i++) {
        for (InternalEntry<K, V, ?> e = table.get(i); e != null; e = e.getNext()) {
          if (map.isLiveForTesting(e)) {
            result++;
          }
        }
      }
    }
    return result;
  }
  // clear() empties the table and resets both readCount and count.
  public void testClear() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    assertEquals(1, table.length());
    Object key = new Object();
    Object value = new Object();
    int hash = map.hash(key);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    segment.setValueForTesting(entry, value);
    segment.setTableEntryForTesting(0, entry);
    segment.readCount.incrementAndGet();
    segment.count = 1;
    assertSame(entry, table.get(0));
    segment.clear();
    assertNull(table.get(0));
    assertEquals(0, segment.readCount.get());
    assertEquals(0, segment.count);
  }
  // removeTableEntryForTesting: false for an absent entry, true (and count--) when present.
  public void testRemoveEntry() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    assertEquals(1, table.length());
    Object key = new Object();
    Object value = new Object();
    int hash = map.hash(key);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    segment.setValueForTesting(entry, value);
    // remove absent
    assertFalse(segment.removeTableEntryForTesting(entry));
    segment.setTableEntryForTesting(0, entry);
    segment.count = 1;
    assertTrue(segment.removeTableEntryForTesting(entry));
    assertEquals(0, segment.count);
    assertNull(table.get(0));
  }
  // clearValueForTesting only removes the entry when the given value reference matches.
  public void testClearValue() {
    MapMakerInternalMap<Object, Object, ?, ?> map =
        makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1).weakValues());
    Segment<Object, Object, ?, ?> segment = map.segments[0];
    AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
    assertEquals(1, table.length());
    Object key = new Object();
    Object value = new Object();
    int hash = map.hash(key);
    InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
    segment.setValueForTesting(entry, value);
    WeakValueReference<Object, Object, ?> valueRef = segment.getWeakValueReferenceForTesting(entry);
    // clear absent
    assertFalse(segment.clearValueForTesting(key, hash, valueRef));
    segment.setTableEntryForTesting(0, entry);
    // don't increment count; this is used during computation
    assertTrue(segment.clearValueForTesting(key, hash, valueRef));
    // no notification sent with clearValue
    assertEquals(0, segment.count);
    assertNull(table.get(0));
    // clear wrong value reference
    segment.setTableEntryForTesting(0, entry);
    WeakValueReference<Object, Object, ?> otherValueRef =
        segment.newWeakValueReferenceForTesting(entry, value);
    segment.setWeakValueReferenceForTesting(entry, otherValueRef);
    assertFalse(segment.clearValueForTesting(key, hash, valueRef));
    segment.setWeakValueReferenceForTesting(entry, valueRef);
    assertTrue(segment.clearValueForTesting(key, hash, valueRef));
  }
  // reference queues
  // A write must drain the key reference queue, evicting the enqueued (collected) key.
  public void testDrainKeyReferenceQueueOnWrite() {
    for (MapMaker maker : allWeakKeyStrengthMakers()) {
      MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
      if (maker.getKeyStrength() == Strength.WEAK) {
        Segment<Object, Object, ?, ?> segment = map.segments[0];
        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();
        Object valueTwo = new Object();
        map.put(keyOne, valueOne);
        InternalEntry<Object, Object, ?> entry = segment.getEntry(keyOne, hashOne);
        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference<Object>) entry;
        // Simulate GC collecting the key by enqueuing its reference directly.
        reference.enqueue();
        map.put(keyTwo, valueTwo);
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(1, map.size());
        assertNull(segment.getKeyReferenceQueueForTesting().poll());
      }
    }
  }
  // A write must drain the value reference queue, evicting the enqueued (collected) value.
  public void testDrainValueReferenceQueueOnWrite() {
    for (MapMaker maker : allWeakValueStrengthMakers()) {
      MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
      if (maker.getValueStrength() == Strength.WEAK) {
        Segment<Object, Object, ?, ?> segment = map.segments[0];
        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();
        Object valueTwo = new Object();
        map.put(keyOne, valueOne);
        WeakValueEntry<Object, Object, ?> entry =
            (WeakValueEntry<Object, Object, ?>) segment.getEntry(keyOne, hashOne);
        WeakValueReference<Object, Object, ?> valueReference = entry.getValueReference();
        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference<Object>) valueReference;
        // Simulate GC collecting the value by enqueuing its reference directly.
        reference.enqueue();
        map.put(keyTwo, valueTwo);
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(1, map.size());
        assertNull(segment.getValueReferenceQueueForTesting().poll());
      }
    }
  }
  // Enough reads (past the drain threshold) must drain the key reference queue too.
  public void testDrainKeyReferenceQueueOnRead() {
    for (MapMaker maker : allWeakKeyStrengthMakers()) {
      MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
      if (maker.getKeyStrength() == Strength.WEAK) {
        Segment<Object, Object, ?, ?> segment = map.segments[0];
        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();
        map.put(keyOne, valueOne);
        InternalEntry<Object, Object, ?> entry = segment.getEntry(keyOne, hashOne);
        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference<Object>) entry;
        reference.enqueue();
        for (int i = 0; i < SMALL_MAX_SIZE; i++) {
          Object unused = map.get(keyTwo);
        }
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(0, map.size());
        assertNull(segment.getKeyReferenceQueueForTesting().poll());
      }
    }
  }
  // Enough reads (past the drain threshold) must drain the value reference queue too.
  public void testDrainValueReferenceQueueOnRead() {
    for (MapMaker maker : allWeakValueStrengthMakers()) {
      MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
      if (maker.getValueStrength() == Strength.WEAK) {
        Segment<Object, Object, ?, ?> segment = map.segments[0];
        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();
        map.put(keyOne, valueOne);
        WeakValueEntry<Object, Object, ?> entry =
            (WeakValueEntry<Object, Object, ?>) segment.getEntry(keyOne, hashOne);
        WeakValueReference<Object, Object, ?> valueReference = entry.getValueReference();
        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference<Object>) valueReference;
        reference.enqueue();
        for (int i = 0; i < SMALL_MAX_SIZE; i++) {
          Object unused = map.get(keyTwo);
        }
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(0, map.size());
        assertNull(segment.getValueReferenceQueueForTesting().poll());
      }
    }
  }
  // utility methods
  // All maker configurations that use weak keys.
  private static Iterable<MapMaker> allWeakKeyStrengthMakers() {
    return ImmutableList.of(createMapMaker().weakKeys(), createMapMaker().weakKeys().weakValues());
  }
  // All maker configurations that use weak values.
  private static Iterable<MapMaker> allWeakValueStrengthMakers() {
    return ImmutableList.of(
        createMapMaker().weakValues(), createMapMaker().weakKeys().weakValues());
  }
  // Public instance methods must reject null arguments.
  public void testNullParameters() throws Exception {
    NullPointerTester tester = new NullPointerTester();
    tester.testAllPublicInstanceMethods(makeMap(createMapMaker()));
  }
}
|
googleapis/google-cloud-java | 35,694 | java-billing/proto-google-cloud-billing-v1/src/main/java/com/google/cloud/billing/v1/ListServicesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/billing/v1/cloud_catalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.billing.v1;
/**
*
*
* <pre>
* Response message for `ListServices`.
* </pre>
*
* Protobuf type {@code google.cloud.billing.v1.ListServicesResponse}
*/
public final class ListServicesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.billing.v1.ListServicesResponse)
ListServicesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListServicesResponse.newBuilder() to construct.
private ListServicesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
  // No-arg constructor used for the default instance; initializes the repeated
  // field to an immutable empty list and the string field to its "" default.
  private ListServicesResponse() {
    services_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListServicesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.billing.v1.CloudCatalogProto
.internal_static_google_cloud_billing_v1_ListServicesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.billing.v1.CloudCatalogProto
.internal_static_google_cloud_billing_v1_ListServicesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.billing.v1.ListServicesResponse.class,
com.google.cloud.billing.v1.ListServicesResponse.Builder.class);
}
public static final int SERVICES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.billing.v1.Service> services_;
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.billing.v1.Service> getServicesList() {
return services_;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.billing.v1.ServiceOrBuilder>
getServicesOrBuilderList() {
return services_;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
@java.lang.Override
public int getServicesCount() {
return services_.size();
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
@java.lang.Override
public com.google.cloud.billing.v1.Service getServices(int index) {
return services_.get(index);
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
@java.lang.Override
public com.google.cloud.billing.v1.ServiceOrBuilder getServicesOrBuilder(int index) {
return services_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve the next page of results. To retrieve the next page,
* call `ListServices` again with the `page_token` field set to this
* value. This field is empty if there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // The field is currently held as a ByteString; decode it once and cache
      // the String back into the field so later calls skip re-decoding.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* A token to retrieve the next page of results. To retrieve the next page,
* call `ListServices` again with the `page_token` field set to this
* value. This field is empty if there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Field is currently held as a String; encode to UTF-8 once and cache
      // the ByteString back into the field so later calls skip re-encoding.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is unconditionally initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Field 1: each service as a length-delimited sub-message.
    for (int i = 0; i < services_.size(); i++) {
      output.writeMessage(1, services_.get(i));
    }
    // Field 2: proto3 strings are only serialized when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    // Preserve any fields that were unknown at parse time.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Return the memoized size if already computed (-1 means "not yet computed").
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < services_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, services_.get(i));
    }
    // Mirrors writeTo: the string contributes only when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-ListServicesResponse objects are delegated to the superclass check.
    if (!(obj instanceof com.google.cloud.billing.v1.ListServicesResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.billing.v1.ListServicesResponse other =
        (com.google.cloud.billing.v1.ListServicesResponse) obj;
    // Field-by-field comparison, including any unknown fields captured at parse time.
    if (!getServicesList().equals(other.getServicesList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 acts as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // The repeated field only contributes when non-empty, keeping the hash
    // consistent with equals() on default instances.
    if (getServicesCount() > 0) {
      hash = (37 * hash) + SERVICES_FIELD_NUMBER;
      hash = (53 * hash) + getServicesList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.billing.v1.ListServicesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.billing.v1.ListServicesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for `ListServices`.
* </pre>
*
* Protobuf type {@code google.cloud.billing.v1.ListServicesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.billing.v1.ListServicesResponse)
com.google.cloud.billing.v1.ListServicesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.billing.v1.CloudCatalogProto
.internal_static_google_cloud_billing_v1_ListServicesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.billing.v1.CloudCatalogProto
.internal_static_google_cloud_billing_v1_ListServicesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.billing.v1.ListServicesResponse.class,
com.google.cloud.billing.v1.ListServicesResponse.Builder.class);
}
// Construct using com.google.cloud.billing.v1.ListServicesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
} else {
services_ = null;
servicesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.billing.v1.CloudCatalogProto
.internal_static_google_cloud_billing_v1_ListServicesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.billing.v1.ListServicesResponse getDefaultInstanceForType() {
return com.google.cloud.billing.v1.ListServicesResponse.getDefaultInstance();
}
@java.lang.Override
    @java.lang.Override
    public com.google.cloud.billing.v1.ListServicesResponse build() {
      com.google.cloud.billing.v1.ListServicesResponse result = buildPartial();
      // isInitialized() is always true for this message (no required fields),
      // but the check is kept for uniformity with other generated builders.
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
@java.lang.Override
public com.google.cloud.billing.v1.ListServicesResponse buildPartial() {
com.google.cloud.billing.v1.ListServicesResponse result =
new com.google.cloud.billing.v1.ListServicesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.billing.v1.ListServicesResponse result) {
if (servicesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
services_ = java.util.Collections.unmodifiableList(services_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.services_ = services_;
} else {
result.services_ = servicesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.billing.v1.ListServicesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.billing.v1.ListServicesResponse) {
return mergeFrom((com.google.cloud.billing.v1.ListServicesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.cloud.billing.v1.ListServicesResponse other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.billing.v1.ListServicesResponse.getDefaultInstance())
        return this;
      if (servicesBuilder_ == null) {
        // List-backed path: share other's immutable list when ours is empty,
        // otherwise copy-on-write and append.
        if (!other.services_.isEmpty()) {
          if (services_.isEmpty()) {
            services_ = other.services_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureServicesIsMutable();
            services_.addAll(other.services_);
          }
          onChanged();
        }
      } else {
        // Field-builder path: if our builder holds nothing, discard it and adopt
        // other's list directly (re-creating the builder only when the runtime
        // forces field builders); otherwise append other's messages.
        if (!other.services_.isEmpty()) {
          if (servicesBuilder_.isEmpty()) {
            servicesBuilder_.dispose();
            servicesBuilder_ = null;
            services_ = other.services_;
            bitField0_ = (bitField0_ & ~0x00000001);
            servicesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getServicesFieldBuilder()
                    : null;
          } else {
            servicesBuilder_.addAllMessages(other.services_);
          }
        }
      }
      // Proto3 scalar merge: other's value wins only when non-empty.
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.billing.v1.Service m =
input.readMessage(
com.google.cloud.billing.v1.Service.parser(), extensionRegistry);
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(m);
} else {
servicesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.billing.v1.Service> services_ =
java.util.Collections.emptyList();
    // Copy-on-write guard: replaces the (possibly shared/immutable) services_
    // list with a private ArrayList copy the first time a mutation is requested.
    // Bit 0x00000001 records that the list is now owned by this builder.
    private void ensureServicesIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        services_ = new java.util.ArrayList<com.google.cloud.billing.v1.Service>(services_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.billing.v1.Service,
com.google.cloud.billing.v1.Service.Builder,
com.google.cloud.billing.v1.ServiceOrBuilder>
servicesBuilder_;
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public java.util.List<com.google.cloud.billing.v1.Service> getServicesList() {
if (servicesBuilder_ == null) {
return java.util.Collections.unmodifiableList(services_);
} else {
return servicesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public int getServicesCount() {
if (servicesBuilder_ == null) {
return services_.size();
} else {
return servicesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public com.google.cloud.billing.v1.Service getServices(int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder setServices(int index, com.google.cloud.billing.v1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.set(index, value);
onChanged();
} else {
servicesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder setServices(
int index, com.google.cloud.billing.v1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.set(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder addServices(com.google.cloud.billing.v1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(value);
onChanged();
} else {
servicesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder addServices(int index, com.google.cloud.billing.v1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(index, value);
onChanged();
} else {
servicesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder addServices(com.google.cloud.billing.v1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder addServices(
int index, com.google.cloud.billing.v1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder addAllServices(
java.lang.Iterable<? extends com.google.cloud.billing.v1.Service> values) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, services_);
onChanged();
} else {
servicesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder clearServices() {
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
servicesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public Builder removeServices(int index) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.remove(index);
onChanged();
} else {
servicesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public com.google.cloud.billing.v1.Service.Builder getServicesBuilder(int index) {
return getServicesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public com.google.cloud.billing.v1.ServiceOrBuilder getServicesOrBuilder(int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public java.util.List<? extends com.google.cloud.billing.v1.ServiceOrBuilder>
getServicesOrBuilderList() {
if (servicesBuilder_ != null) {
return servicesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(services_);
}
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public com.google.cloud.billing.v1.Service.Builder addServicesBuilder() {
return getServicesFieldBuilder()
.addBuilder(com.google.cloud.billing.v1.Service.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public com.google.cloud.billing.v1.Service.Builder addServicesBuilder(int index) {
return getServicesFieldBuilder()
.addBuilder(index, com.google.cloud.billing.v1.Service.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of services.
* </pre>
*
* <code>repeated .google.cloud.billing.v1.Service services = 1;</code>
*/
public java.util.List<com.google.cloud.billing.v1.Service.Builder> getServicesBuilderList() {
return getServicesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.billing.v1.Service,
com.google.cloud.billing.v1.Service.Builder,
com.google.cloud.billing.v1.ServiceOrBuilder>
getServicesFieldBuilder() {
if (servicesBuilder_ == null) {
servicesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.billing.v1.Service,
com.google.cloud.billing.v1.Service.Builder,
com.google.cloud.billing.v1.ServiceOrBuilder>(
services_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
services_ = null;
}
return servicesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token to retrieve the next page of results. To retrieve the next page,
* call `ListServices` again with the `page_token` field set to this
* value. This field is empty if there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Field is currently a ByteString (set via bytes setter or parsing);
        // decode once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* A token to retrieve the next page of results. To retrieve the next page,
* call `ListServices` again with the `page_token` field set to this
* value. This field is empty if there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token to retrieve the next page of results. To retrieve the next page,
* call `ListServices` again with the `page_token` field set to this
* value. This field is empty if there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      // Bit 0x00000002 tracks that next_page_token was explicitly set.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* A token to retrieve the next page of results. To retrieve the next page,
* call `ListServices` again with the `page_token` field set to this
* value. This field is empty if there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
    public Builder clearNextPageToken() {
      // Restore the proto3 default ("") and clear the explicitly-set bit.
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* A token to retrieve the next page of results. To retrieve the next page,
* call `ListServices` again with the `page_token` field set to this
* value. This field is empty if there are no more results to retrieve.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 string fields must hold valid UTF-8; reject invalid byte sequences.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.billing.v1.ListServicesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.billing.v1.ListServicesResponse)
private static final com.google.cloud.billing.v1.ListServicesResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.billing.v1.ListServicesResponse();
}
public static com.google.cloud.billing.v1.ListServicesResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListServicesResponse> PARSER =
new com.google.protobuf.AbstractParser<ListServicesResponse>() {
@java.lang.Override
public ListServicesResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListServicesResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListServicesResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.billing.v1.ListServicesResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,665 | java-securityposture/proto-google-cloud-securityposture-v1/src/main/java/com/google/cloud/securityposture/v1/CreatePostureRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securityposture/v1/securityposture.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securityposture.v1;
/**
*
*
* <pre>
* Message for creating a Posture.
* </pre>
*
* Protobuf type {@code google.cloud.securityposture.v1.CreatePostureRequest}
*/
public final class CreatePostureRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securityposture.v1.CreatePostureRequest)
CreatePostureRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreatePostureRequest.newBuilder() to construct.
private CreatePostureRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
  // No-arg constructor used for the default instance; initializes both string
  // fields to their "" proto3 defaults.
  private CreatePostureRequest() {
    parent_ = "";
    postureId_ = "";
  }
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreatePostureRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securityposture.v1.V1mainProto
.internal_static_google_cloud_securityposture_v1_CreatePostureRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securityposture.v1.V1mainProto
.internal_static_google_cloud_securityposture_v1_CreatePostureRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securityposture.v1.CreatePostureRequest.class,
com.google.cloud.securityposture.v1.CreatePostureRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int POSTURE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object postureId_ = "";
/**
*
*
* <pre>
* Required. User provided identifier. It should be unique in scope of an
* Organization and location.
* </pre>
*
* <code>string posture_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The postureId.
*/
@java.lang.Override
public java.lang.String getPostureId() {
java.lang.Object ref = postureId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
postureId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. User provided identifier. It should be unique in scope of an
* Organization and location.
* </pre>
*
* <code>string posture_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for postureId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPostureIdBytes() {
java.lang.Object ref = postureId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
postureId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int POSTURE_FIELD_NUMBER = 3;
private com.google.cloud.securityposture.v1.Posture posture_;
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the posture field is set.
*/
@java.lang.Override
public boolean hasPosture() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The posture.
*/
@java.lang.Override
public com.google.cloud.securityposture.v1.Posture getPosture() {
return posture_ == null
? com.google.cloud.securityposture.v1.Posture.getDefaultInstance()
: posture_;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.securityposture.v1.PostureOrBuilder getPostureOrBuilder() {
return posture_ == null
? com.google.cloud.securityposture.v1.Posture.getDefaultInstance()
: posture_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(postureId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, postureId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getPosture());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(postureId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, postureId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getPosture());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.securityposture.v1.CreatePostureRequest)) {
return super.equals(obj);
}
com.google.cloud.securityposture.v1.CreatePostureRequest other =
(com.google.cloud.securityposture.v1.CreatePostureRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getPostureId().equals(other.getPostureId())) return false;
if (hasPosture() != other.hasPosture()) return false;
if (hasPosture()) {
if (!getPosture().equals(other.getPosture())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + POSTURE_ID_FIELD_NUMBER;
hash = (53 * hash) + getPostureId().hashCode();
if (hasPosture()) {
hash = (37 * hash) + POSTURE_FIELD_NUMBER;
hash = (53 * hash) + getPosture().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.securityposture.v1.CreatePostureRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Message for creating a Posture.
* </pre>
*
* Protobuf type {@code google.cloud.securityposture.v1.CreatePostureRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securityposture.v1.CreatePostureRequest)
com.google.cloud.securityposture.v1.CreatePostureRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securityposture.v1.V1mainProto
.internal_static_google_cloud_securityposture_v1_CreatePostureRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securityposture.v1.V1mainProto
.internal_static_google_cloud_securityposture_v1_CreatePostureRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securityposture.v1.CreatePostureRequest.class,
com.google.cloud.securityposture.v1.CreatePostureRequest.Builder.class);
}
// Construct using com.google.cloud.securityposture.v1.CreatePostureRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getPostureFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
postureId_ = "";
posture_ = null;
if (postureBuilder_ != null) {
postureBuilder_.dispose();
postureBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securityposture.v1.V1mainProto
.internal_static_google_cloud_securityposture_v1_CreatePostureRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.securityposture.v1.CreatePostureRequest getDefaultInstanceForType() {
return com.google.cloud.securityposture.v1.CreatePostureRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securityposture.v1.CreatePostureRequest build() {
com.google.cloud.securityposture.v1.CreatePostureRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.securityposture.v1.CreatePostureRequest buildPartial() {
com.google.cloud.securityposture.v1.CreatePostureRequest result =
new com.google.cloud.securityposture.v1.CreatePostureRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.securityposture.v1.CreatePostureRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.postureId_ = postureId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.posture_ = postureBuilder_ == null ? posture_ : postureBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securityposture.v1.CreatePostureRequest) {
return mergeFrom((com.google.cloud.securityposture.v1.CreatePostureRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.securityposture.v1.CreatePostureRequest other) {
if (other == com.google.cloud.securityposture.v1.CreatePostureRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getPostureId().isEmpty()) {
postureId_ = other.postureId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasPosture()) {
mergePosture(other.getPosture());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
postureId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getPostureFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Value for parent.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object postureId_ = "";
/**
*
*
* <pre>
* Required. User provided identifier. It should be unique in scope of an
* Organization and location.
* </pre>
*
* <code>string posture_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The postureId.
*/
public java.lang.String getPostureId() {
java.lang.Object ref = postureId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
postureId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. User provided identifier. It should be unique in scope of an
* Organization and location.
* </pre>
*
* <code>string posture_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for postureId.
*/
public com.google.protobuf.ByteString getPostureIdBytes() {
java.lang.Object ref = postureId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
postureId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. User provided identifier. It should be unique in scope of an
* Organization and location.
* </pre>
*
* <code>string posture_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The postureId to set.
* @return This builder for chaining.
*/
public Builder setPostureId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
postureId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User provided identifier. It should be unique in scope of an
* Organization and location.
* </pre>
*
* <code>string posture_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearPostureId() {
postureId_ = getDefaultInstance().getPostureId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. User provided identifier. It should be unique in scope of an
* Organization and location.
* </pre>
*
* <code>string posture_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for postureId to set.
* @return This builder for chaining.
*/
public Builder setPostureIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
postureId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.securityposture.v1.Posture posture_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securityposture.v1.Posture,
com.google.cloud.securityposture.v1.Posture.Builder,
com.google.cloud.securityposture.v1.PostureOrBuilder>
postureBuilder_;
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the posture field is set.
*/
public boolean hasPosture() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The posture.
*/
public com.google.cloud.securityposture.v1.Posture getPosture() {
if (postureBuilder_ == null) {
return posture_ == null
? com.google.cloud.securityposture.v1.Posture.getDefaultInstance()
: posture_;
} else {
return postureBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPosture(com.google.cloud.securityposture.v1.Posture value) {
if (postureBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
posture_ = value;
} else {
postureBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setPosture(com.google.cloud.securityposture.v1.Posture.Builder builderForValue) {
if (postureBuilder_ == null) {
posture_ = builderForValue.build();
} else {
postureBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergePosture(com.google.cloud.securityposture.v1.Posture value) {
if (postureBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& posture_ != null
&& posture_ != com.google.cloud.securityposture.v1.Posture.getDefaultInstance()) {
getPostureBuilder().mergeFrom(value);
} else {
posture_ = value;
}
} else {
postureBuilder_.mergeFrom(value);
}
if (posture_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearPosture() {
bitField0_ = (bitField0_ & ~0x00000004);
posture_ = null;
if (postureBuilder_ != null) {
postureBuilder_.dispose();
postureBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.securityposture.v1.Posture.Builder getPostureBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getPostureFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.securityposture.v1.PostureOrBuilder getPostureOrBuilder() {
if (postureBuilder_ != null) {
return postureBuilder_.getMessageOrBuilder();
} else {
return posture_ == null
? com.google.cloud.securityposture.v1.Posture.getDefaultInstance()
: posture_;
}
}
/**
*
*
* <pre>
* Required. The resource being created.
* </pre>
*
* <code>
* .google.cloud.securityposture.v1.Posture posture = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securityposture.v1.Posture,
com.google.cloud.securityposture.v1.Posture.Builder,
com.google.cloud.securityposture.v1.PostureOrBuilder>
getPostureFieldBuilder() {
if (postureBuilder_ == null) {
postureBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.securityposture.v1.Posture,
com.google.cloud.securityposture.v1.Posture.Builder,
com.google.cloud.securityposture.v1.PostureOrBuilder>(
getPosture(), getParentForChildren(), isClean());
posture_ = null;
}
return postureBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securityposture.v1.CreatePostureRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.securityposture.v1.CreatePostureRequest)
private static final com.google.cloud.securityposture.v1.CreatePostureRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.securityposture.v1.CreatePostureRequest();
}
public static com.google.cloud.securityposture.v1.CreatePostureRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreatePostureRequest> PARSER =
new com.google.protobuf.AbstractParser<CreatePostureRequest>() {
@java.lang.Override
public CreatePostureRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreatePostureRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreatePostureRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.securityposture.v1.CreatePostureRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,731 | java-bigquery-data-exchange/proto-google-cloud-bigquery-data-exchange-v1beta1/src/main/java/com/google/cloud/bigquery/dataexchange/v1beta1/UpdateListingRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/dataexchange/v1beta1/dataexchange.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.dataexchange.v1beta1;
/**
*
*
* <pre>
* Message for updating a Listing.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest}
*/
public final class UpdateListingRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest)
UpdateListingRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateListingRequest.newBuilder() to construct.
private UpdateListingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateListingRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateListingRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
.internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateListingRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
.internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateListingRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest.class,
com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int LISTING_FIELD_NUMBER = 2;
private com.google.cloud.bigquery.dataexchange.v1beta1.Listing listing_;
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the listing field is set.
*/
  @java.lang.Override
  public boolean hasListing() {
    // Presence of listing is tracked by bit 0x2 of bitField0_.
    return ((bitField0_ & 0x00000002) != 0);
  }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The listing.
*/
  @java.lang.Override
  public com.google.cloud.bigquery.dataexchange.v1beta1.Listing getListing() {
    // Never returns null: falls back to the Listing default instance when unset.
    return listing_ == null
        ? com.google.cloud.bigquery.dataexchange.v1beta1.Listing.getDefaultInstance()
        : listing_;
  }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
  @java.lang.Override
  public com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder getListingOrBuilder() {
    // On an immutable message the OrBuilder view is just the message itself (or the default).
    return listing_ == null
        ? com.google.cloud.bigquery.dataexchange.v1beta1.Listing.getDefaultInstance()
        : listing_;
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2 required fields exist, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize only the fields whose presence bits are set, then any unknown fields.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getListing());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed size; -1 means not yet computed.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirrors writeTo(): count only fields whose presence bits are set.
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getListing());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest other =
        (com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest) obj;
    // Field-by-field comparison: presence must match, and values compare only when present.
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasListing() != other.hasListing()) return false;
    if (hasListing()) {
      if (!getListing().equals(other.getListing())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Hash is memoized; 0 doubles as the "not yet computed" sentinel.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only set fields contribute, keyed by their field numbers — consistent with equals().
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasListing()) {
      hash = (37 * hash) + LISTING_FIELD_NUMBER;
      hash = (53 * hash) + getListing().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. The byte-array/ByteString/ByteBuffer overloads
  // delegate to PARSER directly; stream overloads go through the GeneratedMessageV3 helpers
  // so java.io.IOExceptions are surfaced unwrapped.
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message payload.
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Fresh builder with all fields cleared.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder; otherwise copy this message's state.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Message for updating a Listing.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest)
com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
          .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateListingRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
          .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateListingRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest.class,
              com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest.Builder.class);
    }
    // Construct using
    // com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders when the runtime requires it
      // (needed for parent/child change notification).
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
        getListingFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Reset all presence bits and both message fields, disposing any live field builders.
      super.clear();
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      listing_ = null;
      if (listingBuilder_ != null) {
        listingBuilder_.dispose();
        listingBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
          .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateListingRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
        getDefaultInstanceForType() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest build() {
      com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest buildPartial() {
      com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest result =
          new com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest(this);
      // Skip the field-copy step entirely when nothing has been set.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies each set field (from builder or field builder) into the result message
    // and transfers the corresponding presence bits.
    private void buildPartial0(
        com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.listing_ = listingBuilder_ == null ? listing_ : listingBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Boilerplate overrides that delegate to GeneratedMessageV3.Builder; present so the
    // generated class pins the exact reflection behavior of the runtime it was built against.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to reflective merge.
      if (other instanceof com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest) {
        return mergeFrom(
            (com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest other) {
      // Merging the default instance is a no-op; only fields set on `other` are merged in.
      if (other
          == com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
              .getDefaultInstance()) return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasListing()) {
        mergeListing(other.getListing());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required proto2 fields, so a builder is always buildable.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Tag-dispatch loop over the wire format; tag values are
        // (field_number << 3) | wire_type, so 10 = field 1 / length-delimited
        // and 18 = field 2 / length-delimited.
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getListingFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents of partial progress even when parsing throws.
        onChanged();
      } // finally
      return this;
    }
    // Builder-side presence bits: 0x1 = update_mask, 0x2 = listing.
    private int bitField0_;
    private com.google.protobuf.FieldMask updateMask_;
    // Lazily-created helper that manages the nested FieldMask builder; when non-null
    // it owns the field's state and updateMask_ is ignored.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
    public boolean hasUpdateMask() {
      // Presence is tracked by bit 0x1 of the builder's bitField0_.
      return ((bitField0_ & 0x00000001) != 0);
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
    public com.google.protobuf.FieldMask getUpdateMask() {
      // Prefer the live field builder when present; otherwise the stored value or the default.
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      // Null is rejected; routes through the field builder when one exists.
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      // Builds the provided builder and stores the resulting message.
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge into the existing value only when one was previously set and is not
        // the shared default instance; otherwise adopt `value` wholesale.
        if (((bitField0_ & 0x00000001) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      // Mark presence only when a value actually ended up stored inline.
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearUpdateMask() {
      // Drop the presence bit, the stored value, and any live field builder.
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Handing out a mutable builder implies the field will be set.
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      // Read-only view; does not force creation of a field builder.
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* Required. Field mask specifies the fields to update in the listing resource. The
* fields specified in the `updateMask` are relative to the resource and are
* not a full request.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for update_mask. Once created it takes
    // ownership of the current value, so the inline updateMask_ reference is nulled.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    private com.google.cloud.bigquery.dataexchange.v1beta1.Listing listing_;
    // Lazily-created helper managing the nested Listing builder; when non-null it
    // owns the field's state and listing_ is ignored.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.dataexchange.v1beta1.Listing,
            com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder,
            com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>
        listingBuilder_;
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the listing field is set.
*/
    public boolean hasListing() {
      // Presence is tracked by bit 0x2 of the builder's bitField0_.
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The listing.
*/
    public com.google.cloud.bigquery.dataexchange.v1beta1.Listing getListing() {
      // Prefer the live field builder when present; otherwise the stored value or the default.
      if (listingBuilder_ == null) {
        return listing_ == null
            ? com.google.cloud.bigquery.dataexchange.v1beta1.Listing.getDefaultInstance()
            : listing_;
      } else {
        return listingBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setListing(com.google.cloud.bigquery.dataexchange.v1beta1.Listing value) {
      // Null is rejected; routes through the field builder when one exists.
      if (listingBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        listing_ = value;
      } else {
        listingBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder setListing(
        com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder builderForValue) {
      // Builds the provided builder and stores the resulting message.
      if (listingBuilder_ == null) {
        listing_ = builderForValue.build();
      } else {
        listingBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeListing(com.google.cloud.bigquery.dataexchange.v1beta1.Listing value) {
      if (listingBuilder_ == null) {
        // Merge into the existing value only when one was previously set and is not
        // the shared default instance; otherwise adopt `value` wholesale.
        if (((bitField0_ & 0x00000002) != 0)
            && listing_ != null
            && listing_
                != com.google.cloud.bigquery.dataexchange.v1beta1.Listing.getDefaultInstance()) {
          getListingBuilder().mergeFrom(value);
        } else {
          listing_ = value;
        }
      } else {
        listingBuilder_.mergeFrom(value);
      }
      // Mark presence only when a value actually ended up stored inline.
      if (listing_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder clearListing() {
      // Drop the presence bit, the stored value, and any live field builder.
      bitField0_ = (bitField0_ & ~0x00000002);
      listing_ = null;
      if (listingBuilder_ != null) {
        listingBuilder_.dispose();
        listingBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder getListingBuilder() {
      // Handing out a mutable builder implies the field will be set.
      bitField0_ |= 0x00000002;
      onChanged();
      return getListingFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder getListingOrBuilder() {
      // Read-only view; does not force creation of a field builder.
      if (listingBuilder_ != null) {
        return listingBuilder_.getMessageOrBuilder();
      } else {
        return listing_ == null
            ? com.google.cloud.bigquery.dataexchange.v1beta1.Listing.getDefaultInstance()
            : listing_;
      }
    }
/**
*
*
* <pre>
* Required. The listing to update.
* </pre>
*
* <code>
* .google.cloud.bigquery.dataexchange.v1beta1.Listing listing = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    // Lazily creates the SingleFieldBuilderV3 for listing. Once created it takes
    // ownership of the current value, so the inline listing_ reference is nulled.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.dataexchange.v1beta1.Listing,
            com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder,
            com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>
        getListingFieldBuilder() {
      if (listingBuilder_ == null) {
        listingBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.bigquery.dataexchange.v1beta1.Listing,
                com.google.cloud.bigquery.dataexchange.v1beta1.Listing.Builder,
                com.google.cloud.bigquery.dataexchange.v1beta1.ListingOrBuilder>(
                getListing(), getParentForChildren(), isClean());
        listing_ = null;
      }
      return listingBuilder_;
    }
    // Unknown-field handling delegates entirely to the base builder.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest)
  // Singleton default (all-fields-unset) instance, created eagerly at class load.
  private static final com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest();
  }
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser: builds via the message Builder and preserves the partially parsed
  // message on failure via setUnfinishedMessage so callers can inspect partial data.
  private static final com.google.protobuf.Parser<UpdateListingRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateListingRequest>() {
        @java.lang.Override
        public UpdateListingRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so the parser contract (IPBE only) holds.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateListingRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateListingRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateListingRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/chat/v1/space.proto
// Protobuf Java Version: 3.25.8
package com.google.chat.v1;
/**
*
*
* <pre>
* A request to create a named space with no members.
* </pre>
*
* Protobuf type {@code google.chat.v1.CreateSpaceRequest}
*/
public final class CreateSpaceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.chat.v1.CreateSpaceRequest)
CreateSpaceRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateSpaceRequest.newBuilder() to construct.
  private CreateSpaceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default construction initializes string fields to "" (never null).
  private CreateSpaceRequest() {
    requestId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Reflection hook used by the protobuf runtime to allocate fresh instances.
    return new CreateSpaceRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.chat.v1.SpaceProto
        .internal_static_google_chat_v1_CreateSpaceRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.chat.v1.SpaceProto
        .internal_static_google_chat_v1_CreateSpaceRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.chat.v1.CreateSpaceRequest.class,
            com.google.chat.v1.CreateSpaceRequest.Builder.class);
  }
  // Presence bits for message fields: 0x1 = space.
  private int bitField0_;
  public static final int SPACE_FIELD_NUMBER = 1;
  private com.google.chat.v1.Space space_;
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the space field is set.
*/
  @java.lang.Override
  public boolean hasSpace() {
    // Presence of `space` is tracked by bit 0x1 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
  }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The space.
*/
  @java.lang.Override
  public com.google.chat.v1.Space getSpace() {
    // Never returns null: falls back to the Space default instance when unset.
    return space_ == null ? com.google.chat.v1.Space.getDefaultInstance() : space_;
  }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
  @java.lang.Override
  public com.google.chat.v1.SpaceOrBuilder getSpaceOrBuilder() {
    // On an immutable message the OrBuilder view is just the message itself (or the default).
    return space_ == null ? com.google.chat.v1.Space.getDefaultInstance() : space_;
  }
  public static final int REQUEST_ID_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; lazily converted and cached as String on first
  // getRequestId() call (volatile for safe cross-thread publication of the cached form).
  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A unique identifier for this request.
* A random UUID is recommended.
* Specifying an existing request ID returns the space created with that ID
* instead of creating a new space.
* Specifying an existing request ID from the same Chat app with a different
* authenticated user returns an error.
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the parsed ByteString: decode UTF-8 once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Optional. A unique identifier for this request.
* A random UUID is recommended.
* Specifying an existing request ID returns the space created with that ID
* instead of creating a new space.
* Specifying an existing request ID from the same Chat app with a different
* authenticated user returns an error.
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      // Field currently cached as String: encode to UTF-8 bytes and cache the ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /**
   * Always true once computed: the body unconditionally records 1, i.e. this
   * message has no fields that can leave it uninitialized.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message in field-number order: field 1 (space) only when
   * its presence bit is set, field 2 (request_id) only when non-empty, then
   * any unknown fields preserved from parsing.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      // Bit 0 of bitField0_ tracks explicit presence of the space field.
      output.writeMessage(1, getSpace());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes the byte size of the wire-format encoding produced by
   * {@link #writeTo}, memoizing the result in {@code memoizedSize}.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // -1 is the "not yet computed" sentinel
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSpace());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality: equal when the space field has the same presence and
   * value, the request IDs match, and the unknown field sets are identical.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.chat.v1.CreateSpaceRequest)) {
      return super.equals(obj);
    }
    com.google.chat.v1.CreateSpaceRequest other = (com.google.chat.v1.CreateSpaceRequest) obj;
    // Presence must match before values are compared.
    if (hasSpace() != other.hasSpace()) return false;
    if (hasSpace()) {
      if (!getSpace().equals(other.getSpace())) return false;
    }
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash code consistent with {@link #equals}, memoized in
   * {@code memoizedHashCode} (0 doubles as the "not yet computed" sentinel).
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasSpace()) {
      // Only a present space field contributes, mirroring equals().
      hash = (37 * hash) + SPACE_FIELD_NUMBER;
      hash = (53 * hash) + getSpace().hashCode();
    }
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // -------------------------------------------------------------------------
  // Static parsing entry points. Every overload delegates to PARSER. The
  // in-memory variants (ByteBuffer/ByteString/byte[]) throw
  // InvalidProtocolBufferException on malformed input; the stream variants
  // additionally surface IOException from the underlying source.
  // -------------------------------------------------------------------------
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.chat.v1.CreateSpaceRequest parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.CreateSpaceRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a builder with all fields at their default values.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated from an existing request.
  public static Builder newBuilder(com.google.chat.v1.CreateSpaceRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the mergeFrom() copy when this is already the all-default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A request to create a named space with no members.
* </pre>
*
* Protobuf type {@code google.chat.v1.CreateSpaceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.chat.v1.CreateSpaceRequest)
com.google.chat.v1.CreateSpaceRequestOrBuilder {
    // Descriptor for google.chat.v1.CreateSpaceRequest, shared with the
    // enclosing message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.chat.v1.SpaceProto
          .internal_static_google_chat_v1_CreateSpaceRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.chat.v1.SpaceProto
          .internal_static_google_chat_v1_CreateSpaceRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.chat.v1.CreateSpaceRequest.class,
              com.google.chat.v1.CreateSpaceRequest.Builder.class);
    }
    // Construct using com.google.chat.v1.CreateSpaceRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Eagerly create nested field builders only when the runtime requests it.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getSpaceFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Resets all fields and presence bits to their defaults.
      super.clear();
      bitField0_ = 0;
      space_ = null;
      if (spaceBuilder_ != null) {
        spaceBuilder_.dispose();
        spaceBuilder_ = null;
      }
      requestId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.chat.v1.SpaceProto
          .internal_static_google_chat_v1_CreateSpaceRequest_descriptor;
    }
    @java.lang.Override
    public com.google.chat.v1.CreateSpaceRequest getDefaultInstanceForType() {
      return com.google.chat.v1.CreateSpaceRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.chat.v1.CreateSpaceRequest build() {
      com.google.chat.v1.CreateSpaceRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.chat.v1.CreateSpaceRequest buildPartial() {
      com.google.chat.v1.CreateSpaceRequest result =
          new com.google.chat.v1.CreateSpaceRequest(this);
      // Only copy fields whose presence bits are set in this builder.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers set fields from the builder into the freshly built message,
    // accumulating the message's own presence bits in to_bitField0_.
    private void buildPartial0(com.google.chat.v1.CreateSpaceRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        // Prefer the live nested builder's value when one exists.
        result.space_ = spaceBuilder_ == null ? space_ : spaceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.requestId_ = requestId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // The overrides below only narrow the reflective mutators' return type to
    // Builder for fluent chaining; behavior is inherited unchanged.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for same-type merges; fall back to the generic field-by-field
      // merge for arbitrary messages.
      if (other instanceof com.google.chat.v1.CreateSpaceRequest) {
        return mergeFrom((com.google.chat.v1.CreateSpaceRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /**
     * Merges a same-type message into this builder: a set space field is
     * merged recursively, a non-empty requestId overwrites, and unknown
     * fields are combined. Default-instance input is a no-op.
     */
    public Builder mergeFrom(com.google.chat.v1.CreateSpaceRequest other) {
      if (other == com.google.chat.v1.CreateSpaceRequest.getDefaultInstance()) return this;
      if (other.hasSpace()) {
        mergeSpace(other.getSpace());
      }
      if (!other.getRequestId().isEmpty()) {
        requestId_ = other.requestId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No fields can leave this message uninitialized.
      return true;
    }
    /**
     * Tag-driven wire parsing loop: field 1 (tag 10) is a nested Space
     * message, field 2 (tag 18) a UTF-8 validated string; anything else is
     * preserved as an unknown field. Partial progress is kept on error.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getSpaceFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Bit 0: space present; bit 1: requestId set.
    private int bitField0_;
    private com.google.chat.v1.Space space_;
    // Lazily created in getSpaceFieldBuilder(); once present it owns the value
    // and space_ is kept null.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.chat.v1.Space,
            com.google.chat.v1.Space.Builder,
            com.google.chat.v1.SpaceOrBuilder>
        spaceBuilder_;
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return Whether the space field is set.
*/
    public boolean hasSpace() {
      // Bit 0 of bitField0_ records explicit presence of the space field.
      return ((bitField0_ & 0x00000001) != 0);
    }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The space.
*/
public com.google.chat.v1.Space getSpace() {
if (spaceBuilder_ == null) {
return space_ == null ? com.google.chat.v1.Space.getDefaultInstance() : space_;
} else {
return spaceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
    public Builder setSpace(com.google.chat.v1.Space value) {
      if (spaceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        space_ = value;
      } else {
        // Route through the nested builder when one is active.
        spaceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001; // mark the space field as explicitly set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
    public Builder setSpace(com.google.chat.v1.Space.Builder builderForValue) {
      if (spaceBuilder_ == null) {
        // Snapshot the supplied builder; later edits to it are not reflected.
        space_ = builderForValue.build();
      } else {
        spaceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001; // mark the space field as explicitly set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
    public Builder mergeSpace(com.google.chat.v1.Space value) {
      if (spaceBuilder_ == null) {
        // Only merge field-by-field when an explicitly set, non-default value
        // already exists; otherwise simply adopt the incoming message.
        if (((bitField0_ & 0x00000001) != 0)
            && space_ != null
            && space_ != com.google.chat.v1.Space.getDefaultInstance()) {
          getSpaceBuilder().mergeFrom(value);
        } else {
          space_ = value;
        }
      } else {
        spaceBuilder_.mergeFrom(value);
      }
      if (space_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
    public Builder clearSpace() {
      // Drop both the presence bit and any stored value or nested builder.
      bitField0_ = (bitField0_ & ~0x00000001);
      space_ = null;
      if (spaceBuilder_ != null) {
        spaceBuilder_.dispose();
        spaceBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
    public com.google.chat.v1.Space.Builder getSpaceBuilder() {
      // Handing out a mutable builder counts as setting the field.
      bitField0_ |= 0x00000001;
      onChanged();
      return getSpaceFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
public com.google.chat.v1.SpaceOrBuilder getSpaceOrBuilder() {
if (spaceBuilder_ != null) {
return spaceBuilder_.getMessageOrBuilder();
} else {
return space_ == null ? com.google.chat.v1.Space.getDefaultInstance() : space_;
}
}
/**
*
*
* <pre>
* Required. The `displayName` and `spaceType` fields must be populated. Only
* `SpaceType.SPACE` and `SpaceType.GROUP_CHAT` are supported.
* `SpaceType.GROUP_CHAT` can only be used if `importMode` is set to true.
*
* If you receive the error message `ALREADY_EXISTS`,
* try a different `displayName`. An existing space within the Google
* Workspace organization might already use this display name.
*
*
* The space `name` is assigned on the server so anything specified in this
* field will be ignored.
* </pre>
*
* <code>.google.chat.v1.Space space = 1 [(.google.api.field_behavior) = REQUIRED];</code>
*/
    // Lazily creates the nested single-field builder for space. After
    // creation the builder owns the value, so space_ is nulled out.
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.chat.v1.Space,
            com.google.chat.v1.Space.Builder,
            com.google.chat.v1.SpaceOrBuilder>
        getSpaceFieldBuilder() {
      if (spaceBuilder_ == null) {
        spaceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.chat.v1.Space,
                com.google.chat.v1.Space.Builder,
                com.google.chat.v1.SpaceOrBuilder>(getSpace(), getParentForChildren(), isClean());
        space_ = null;
      }
      return spaceBuilder_;
    }
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. A unique identifier for this request.
* A random UUID is recommended.
* Specifying an existing request ID returns the space created with that ID
* instead of creating a new space.
* Specifying an existing request ID from the same Chat app with a different
* authenticated user returns an error.
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. A unique identifier for this request.
* A random UUID is recommended.
* Specifying an existing request ID returns the space created with that ID
* instead of creating a new space.
* Specifying an existing request ID from the same Chat app with a different
* authenticated user returns an error.
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. A unique identifier for this request.
* A random UUID is recommended.
* Specifying an existing request ID returns the space created with that ID
* instead of creating a new space.
* Specifying an existing request ID from the same Chat app with a different
* authenticated user returns an error.
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
    public Builder setRequestId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      requestId_ = value;
      bitField0_ |= 0x00000002; // mark requestId as set
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. A unique identifier for this request.
* A random UUID is recommended.
* Specifying an existing request ID returns the space created with that ID
* instead of creating a new space.
* Specifying an existing request ID from the same Chat app with a different
* authenticated user returns an error.
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
    public Builder clearRequestId() {
      // Restore the default ("") and drop the presence bit.
      requestId_ = getDefaultInstance().getRequestId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. A unique identifier for this request.
* A random UUID is recommended.
* Specifying an existing request ID returns the space created with that ID
* instead of creating a new space.
* Specifying an existing request ID from the same Chat app with a different
* authenticated user returns an error.
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
    public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Proto3 strings must be valid UTF-8; reject invalid bytes up front.
      checkByteStringIsUtf8(value);
      requestId_ = value;
      bitField0_ |= 0x00000002; // mark requestId as set
      onChanged();
      return this;
    }
    // Narrow the unknown-field mutators' return type for fluent chaining.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.chat.v1.CreateSpaceRequest)
}
// @@protoc_insertion_point(class_scope:google.chat.v1.CreateSpaceRequest)
  // Shared immutable all-default instance; also serves as the identity used
  // by toBuilder() and mergeFrom() fast paths.
  private static final com.google.chat.v1.CreateSpaceRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.chat.v1.CreateSpaceRequest();
  }
  public static com.google.chat.v1.CreateSpaceRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser backing all parseFrom() overloads. Builds via a Builder and, on
  // failure, attaches the partially parsed message to the thrown exception.
  private static final com.google.protobuf.Parser<CreateSpaceRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateSpaceRequest>() {
        @java.lang.Override
        public CreateSpaceRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static and instance accessors both expose the same shared PARSER.
  public static com.google.protobuf.Parser<CreateSpaceRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateSpaceRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.chat.v1.CreateSpaceRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hadoop-common | 35,677 | hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/Storage.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.common;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.lang.management.ManagementFactory;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.util.VersionInfo;
import com.google.common.base.Charsets;
import com.google.common.base.Preconditions;
/**
* Storage information file.
* <p>
* Local storage information is stored in a separate file VERSION.
* It contains type of the node,
* the storage layout version, the namespace id, and
* the fs state creation time.
* <p>
* Local storage can reside in multiple directories.
* Each directory should contain the same VERSION file as the others.
* During startup Hadoop servers (name-node and data-nodes) read their local
* storage information from them.
* <p>
 * The servers hold a lock for each storage directory while they run so that
 * other nodes cannot start up and share the same storage.
* The locks are released when the servers stop (normally or abnormally).
*
*/
@InterfaceAudience.Private
public abstract class Storage extends StorageInfo {
public static final Log LOG = LogFactory.getLog(Storage.class.getName());
// last layout version that did not support upgrades
public static final int LAST_PRE_UPGRADE_LAYOUT_VERSION = -3;
// this corresponds to Hadoop-0.18
public static final int LAST_UPGRADABLE_LAYOUT_VERSION = -16;
protected static final String LAST_UPGRADABLE_HADOOP_VERSION = "Hadoop-0.18";
/** Layout versions of 0.20.203 release */
public static final int[] LAYOUT_VERSIONS_203 = {-19, -31};
public static final String STORAGE_FILE_LOCK = "in_use.lock";
public static final String STORAGE_DIR_CURRENT = "current";
public static final String STORAGE_DIR_PREVIOUS = "previous";
public static final String STORAGE_TMP_REMOVED = "removed.tmp";
public static final String STORAGE_TMP_PREVIOUS = "previous.tmp";
public static final String STORAGE_TMP_FINALIZED = "finalized.tmp";
public static final String STORAGE_TMP_LAST_CKPT = "lastcheckpoint.tmp";
public static final String STORAGE_PREVIOUS_CKPT = "previous.checkpoint";
/**
* The blocksBeingWritten directory which was used in some 1.x and earlier
* releases.
*/
public static final String STORAGE_1_BBW = "blocksBeingWritten";
public enum StorageState {
NON_EXISTENT,
NOT_FORMATTED,
COMPLETE_UPGRADE,
RECOVER_UPGRADE,
COMPLETE_FINALIZE,
COMPLETE_ROLLBACK,
RECOVER_ROLLBACK,
COMPLETE_CHECKPOINT,
RECOVER_CHECKPOINT,
NORMAL;
}
/**
* An interface to denote storage directory type
* Implementations can define a type for storage directory by implementing
* this interface.
*/
  @InterfaceAudience.Private
  public interface StorageDirType {
    // The concrete type tag of this storage directory.
    public StorageDirType getStorageDirType();
    // Whether this directory's type matches (or subsumes) the given type.
    public boolean isOfType(StorageDirType type);
  }
  // All configured storage directories, in registration order.
  protected List<StorageDirectory> storageDirs = new ArrayList<StorageDirectory>();
  /**
   * Iterator over storageDirs that can filter by directory type and/or skip
   * shared directories. The cursor (nextIndex) is advanced past non-matching
   * entries both in hasNext() and after each next(), so the two stay in sync.
   */
  private class DirIterator implements Iterator<StorageDirectory> {
    final StorageDirType dirType;       // null means "any type"
    final boolean includeShared;        // false skips shared directories
    int prevIndex; // for remove()
    int nextIndex; // for next()
    DirIterator(StorageDirType dirType, boolean includeShared) {
      this.dirType = dirType;
      this.nextIndex = 0;
      this.prevIndex = 0;
      this.includeShared = includeShared;
    }
    @Override
    public boolean hasNext() {
      if (storageDirs.isEmpty() || nextIndex >= storageDirs.size())
        return false;
      if (dirType != null || !includeShared) {
        // Skip forward to the next entry that passes the filter.
        while (nextIndex < storageDirs.size()) {
          if (shouldReturnNextDir())
            break;
          nextIndex++;
        }
        if (nextIndex >= storageDirs.size())
          return false;
      }
      return true;
    }
    @Override
    public StorageDirectory next() {
      StorageDirectory sd = getStorageDir(nextIndex);
      prevIndex = nextIndex;
      nextIndex++;
      if (dirType != null || !includeShared) {
        // Pre-advance past filtered-out entries for the following call.
        while (nextIndex < storageDirs.size()) {
          if (shouldReturnNextDir())
            break;
          nextIndex++;
        }
      }
      return sd;
    }
    @Override
    public void remove() {
      nextIndex = prevIndex; // restore previous state
      storageDirs.remove(prevIndex); // remove last returned element
      hasNext(); // reset nextIndex to correct place
    }
    // Filter predicate: type matches (or no type filter) and, unless shared
    // dirs are included, the directory is not shared.
    private boolean shouldReturnNextDir() {
      StorageDirectory sd = getStorageDir(nextIndex);
      return (dirType == null || sd.getStorageDirType().isOfType(dirType)) &&
          (includeShared || !sd.isShared());
    }
  }
/**
* @return A list of the given File in every available storage directory,
* regardless of whether it might exist.
*/
public List<File> getFiles(StorageDirType dirType, String fileName) {
ArrayList<File> list = new ArrayList<File>();
Iterator<StorageDirectory> it =
(dirType == null) ? dirIterator() : dirIterator(dirType);
for ( ;it.hasNext(); ) {
list.add(new File(it.next().getCurrentDir(), fileName));
}
return list;
}
/**
* Return default iterator
* This iterator returns all entries in storageDirs
*/
public Iterator<StorageDirectory> dirIterator() {
return dirIterator(null);
}
/**
* Return iterator based on Storage Directory Type
* This iterator selects entries in storageDirs of type dirType and returns
* them via the Iterator
*/
public Iterator<StorageDirectory> dirIterator(StorageDirType dirType) {
return dirIterator(dirType, true);
}
/**
* Return all entries in storageDirs, potentially excluding shared dirs.
* @param includeShared whether or not to include shared dirs.
* @return an iterator over the configured storage dirs.
*/
public Iterator<StorageDirectory> dirIterator(boolean includeShared) {
return dirIterator(null, includeShared);
}
/**
 * The most general iterator factory; the other dirIterator overloads
 * delegate here.
 *
 * @param dirType all entries will be of this type of dir, or null for any
 * @param includeShared true to include any shared directories,
 *        false otherwise
 * @return an iterator over the configured storage dirs.
 */
public Iterator<StorageDirectory> dirIterator(StorageDirType dirType,
    boolean includeShared) {
  DirIterator it = new DirIterator(dirType, includeShared);
  return it;
}
/**
 * Adapts {@link #dirIterator(StorageDirType)} to {@link Iterable} so the
 * filtered directories can be used in for-each loops. Each call to
 * iterator() creates a fresh iterator over the current storageDirs.
 */
public Iterable<StorageDirectory> dirIterable(final StorageDirType dirType) {
  return new Iterable<StorageDirectory>() {
    @Override
    public Iterator<StorageDirectory> iterator() {
      return dirIterator(dirType);
    }
  };
}
/**
 * Generate a one-line summary of all configured storage directories for
 * debug output, in the form "root(type);root(type);...".
 *
 * @return the semicolon-terminated list of directory roots and types
 */
public String listStorageDirectories() {
  StringBuilder buf = new StringBuilder();
  for (StorageDirectory sd : storageDirs) {
    // Append the pieces individually: concatenating with '+' inside a
    // single append() built a throw-away String per entry and defeated
    // the purpose of using a StringBuilder.
    buf.append(sd.getRoot()).append('(')
        .append(sd.getStorageDirType()).append(");");
  }
  return buf.toString();
}
/**
 * One of the storage directories.
 *
 * Encapsulates a single on-disk storage root: its type, its transient
 * subdirectories used during upgrade/rollback/checkpoint transitions,
 * exclusive-lock handling via an in_use.lock file, and the state-analysis
 * and recovery logic that interprets which transient directories exist.
 */
@InterfaceAudience.Private
public static class StorageDirectory implements FormatConfirmable {
  final File root; // root directory
  // whether or not this dir is shared between two separate NNs for HA, or
  // between multiple block pools in the case of federation.
  final boolean isShared;
  final StorageDirType dirType; // storage dir type
  FileLock lock; // storage lock; null while unlocked
  private String storageUuid = null; // Storage directory identifier.

  public StorageDirectory(File dir) {
    // default dirType is null
    this(dir, null, false);
  }

  public StorageDirectory(File dir, StorageDirType dirType) {
    this(dir, dirType, false);
  }

  /** Set the identifier of this storage directory. */
  public void setStorageUuid(String storageUuid) {
    this.storageUuid = storageUuid;
  }

  /** @return the identifier of this storage directory, or null if unset. */
  public String getStorageUuid() {
    return storageUuid;
  }

  /**
   * Constructor
   * @param dir directory corresponding to the storage
   * @param dirType storage directory type
   * @param isShared whether or not this dir is shared between two NNs. true
   *    disables locking on the storage directory, false enables locking
   */
  public StorageDirectory(File dir, StorageDirType dirType, boolean isShared) {
    this.root = dir;
    this.lock = null;
    this.dirType = dirType;
    this.isShared = isShared;
  }

  /**
   * Get root directory of this storage
   */
  public File getRoot() {
    return root;
  }

  /**
   * Get storage directory type
   */
  public StorageDirType getStorageDirType() {
    return dirType;
  }

  /**
   * Load the properties file {@code from} and apply its fields to the
   * given storage via {@code setFieldsFromProperties}.
   *
   * @param from the properties file to read
   * @param storage the storage object to populate
   * @throws IOException if the file cannot be read or fields are invalid
   */
  public void read(File from, Storage storage) throws IOException {
    Properties props = readPropertiesFile(from);
    storage.setFieldsFromProperties(props, this);
  }

  /**
   * Clear and re-create storage directory.
   * <p>
   * Removes contents of the current directory and creates an empty directory.
   *
   * This does not fully format storage directory.
   * It cannot write the version file since it should be written last after
   * all other storage type dependent files are written.
   * Derived storage is responsible for setting specific storage values and
   * writing the version file to disk.
   *
   * @throws IOException if deletion or creation of "current" fails
   */
  public void clearDirectory() throws IOException {
    File curDir = this.getCurrentDir();
    if (curDir.exists())
      if (!(FileUtil.fullyDelete(curDir)))
        throw new IOException("Cannot remove current directory: " + curDir);
    if (!curDir.mkdirs())
      throw new IOException("Cannot create directory " + curDir);
  }

  /**
   * Directory {@code current} contains latest files defining
   * the file system meta-data.
   *
   * @return the directory path
   */
  public File getCurrentDir() {
    return new File(root, STORAGE_DIR_CURRENT);
  }

  /**
   * File {@code VERSION} contains the following fields:
   * <ol>
   * <li>node type</li>
   * <li>layout version</li>
   * <li>namespaceID</li>
   * <li>fs state creation time</li>
   * <li>other fields specific for this node type</li>
   * </ol>
   * The version file is always written last during storage directory updates.
   * The existence of the version file indicates that all other files have
   * been successfully written in the storage directory, the storage is valid
   * and does not need to be recovered.
   *
   * @return the version file path
   */
  public File getVersionFile() {
    return new File(new File(root, STORAGE_DIR_CURRENT), STORAGE_FILE_VERSION);
  }

  /**
   * File {@code VERSION} from the {@code previous} directory.
   *
   * @return the previous version file path
   */
  public File getPreviousVersionFile() {
    return new File(new File(root, STORAGE_DIR_PREVIOUS), STORAGE_FILE_VERSION);
  }

  /**
   * Directory {@code previous} contains the previous file system state,
   * which the system can be rolled back to.
   *
   * @return the directory path
   */
  public File getPreviousDir() {
    return new File(root, STORAGE_DIR_PREVIOUS);
  }

  /**
   * {@code previous.tmp} is a transient directory, which holds
   * current file system state while the new state is saved into the new
   * {@code current} during upgrade.
   * If the saving succeeds {@code previous.tmp} will be moved to
   * {@code previous}, otherwise it will be renamed back to
   * {@code current} by the recovery procedure during startup.
   *
   * @return the directory path
   */
  public File getPreviousTmp() {
    return new File(root, STORAGE_TMP_PREVIOUS);
  }

  /**
   * {@code removed.tmp} is a transient directory, which holds
   * current file system state while the previous state is moved into
   * {@code current} during rollback.
   * If the moving succeeds {@code removed.tmp} will be removed,
   * otherwise it will be renamed back to
   * {@code current} by the recovery procedure during startup.
   *
   * @return the directory path
   */
  public File getRemovedTmp() {
    return new File(root, STORAGE_TMP_REMOVED);
  }

  /**
   * {@code finalized.tmp} is a transient directory, which holds
   * the {@code previous} file system state while it is being removed
   * in response to the finalize request.
   * Finalize operation will remove {@code finalized.tmp} when completed,
   * otherwise the removal will resume upon the system startup.
   *
   * @return the directory path
   */
  public File getFinalizedTmp() {
    return new File(root, STORAGE_TMP_FINALIZED);
  }

  /**
   * {@code lastcheckpoint.tmp} is a transient directory, which holds
   * current file system state while the new state is saved into the new
   * {@code current} during regular namespace updates.
   * If the saving succeeds {@code lastcheckpoint.tmp} will be moved to
   * {@code previous.checkpoint}, otherwise it will be renamed back to
   * {@code current} by the recovery procedure during startup.
   *
   * @return the directory path
   */
  public File getLastCheckpointTmp() {
    return new File(root, STORAGE_TMP_LAST_CKPT);
  }

  /**
   * {@code previous.checkpoint} is a directory, which holds the previous
   * (before the last save) state of the storage directory.
   * The directory is created as a reference only, it does not play role
   * in state recovery procedures, and is recycled automatically,
   * but it may be useful for manual recovery of a stale state of the system.
   *
   * @return the directory path
   */
  public File getPreviousCheckpoint() {
    return new File(root, STORAGE_PREVIOUS_CKPT);
  }

  /**
   * Check consistency of the storage directory
   *
   * @param startOpt a startup option.
   * @param storage the storage whose old-layout check is applied to this dir
   *
   * @return state {@link StorageState} of the storage directory
   * @throws InconsistentFSStateException if directory state is not
   * consistent and cannot be recovered.
   * @throws IOException
   */
  public StorageState analyzeStorage(StartupOption startOpt, Storage storage)
      throws IOException {
    assert root != null : "root is null";
    String rootPath = root.getCanonicalPath();
    try { // check that storage exists
      if (!root.exists()) {
        // storage directory does not exist
        if (startOpt != StartupOption.FORMAT) {
          LOG.warn("Storage directory " + rootPath + " does not exist");
          return StorageState.NON_EXISTENT;
        }
        LOG.info(rootPath + " does not exist. Creating ...");
        if (!root.mkdirs())
          throw new IOException("Cannot create directory " + rootPath);
      }
      // or is inaccessible
      if (!root.isDirectory()) {
        // NOTE(review): the log message is missing a space between the
        // path and "is not a directory".
        LOG.warn(rootPath + "is not a directory");
        return StorageState.NON_EXISTENT;
      }
      if (!FileUtil.canWrite(root)) {
        LOG.warn("Cannot access storage directory " + rootPath);
        return StorageState.NON_EXISTENT;
      }
    } catch(SecurityException ex) {
      LOG.warn("Cannot access storage directory " + rootPath, ex);
      return StorageState.NON_EXISTENT;
    }
    this.lock(); // lock storage if it exists
    if (startOpt == HdfsServerConstants.StartupOption.FORMAT)
      return StorageState.NOT_FORMATTED;
    if (startOpt != HdfsServerConstants.StartupOption.IMPORT) {
      storage.checkOldLayoutStorage(this);
    }
    // check whether current directory is valid
    File versionFile = getVersionFile();
    boolean hasCurrent = versionFile.exists();
    // check which directories exist
    boolean hasPrevious = getPreviousDir().exists();
    boolean hasPreviousTmp = getPreviousTmp().exists();
    boolean hasRemovedTmp = getRemovedTmp().exists();
    boolean hasFinalizedTmp = getFinalizedTmp().exists();
    boolean hasCheckpointTmp = getLastCheckpointTmp().exists();
    if (!(hasPreviousTmp || hasRemovedTmp
        || hasFinalizedTmp || hasCheckpointTmp)) {
      // no temp dirs - no recovery
      if (hasCurrent)
        return StorageState.NORMAL;
      if (hasPrevious)
        throw new InconsistentFSStateException(root,
            "version file in current directory is missing.");
      return StorageState.NOT_FORMATTED;
    }
    // At most one transient directory may exist at a time; more than one
    // means an unrecoverable, inconsistent state.
    if ((hasPreviousTmp?1:0) + (hasRemovedTmp?1:0)
        + (hasFinalizedTmp?1:0) + (hasCheckpointTmp?1:0) > 1)
      // more than one temp dirs
      throw new InconsistentFSStateException(root,
          "too many temporary directories.");
    // # of temp dirs == 1 should either recover or complete a transition
    // In each case below, "hasCurrent" decides whether the interrupted
    // transition can be completed forward or must be recovered backward.
    if (hasCheckpointTmp) {
      return hasCurrent ? StorageState.COMPLETE_CHECKPOINT
          : StorageState.RECOVER_CHECKPOINT;
    }
    if (hasFinalizedTmp) {
      if (hasPrevious)
        throw new InconsistentFSStateException(root,
            STORAGE_DIR_PREVIOUS + " and " + STORAGE_TMP_FINALIZED
            + "cannot exist together.");
      return StorageState.COMPLETE_FINALIZE;
    }
    if (hasPreviousTmp) {
      if (hasPrevious)
        throw new InconsistentFSStateException(root,
            STORAGE_DIR_PREVIOUS + " and " + STORAGE_TMP_PREVIOUS
            + " cannot exist together.");
      if (hasCurrent)
        return StorageState.COMPLETE_UPGRADE;
      return StorageState.RECOVER_UPGRADE;
    }
    assert hasRemovedTmp : "hasRemovedTmp must be true";
    if (!(hasCurrent ^ hasPrevious))
      throw new InconsistentFSStateException(root,
          "one and only one directory " + STORAGE_DIR_CURRENT
          + " or " + STORAGE_DIR_PREVIOUS
          + " must be present when " + STORAGE_TMP_REMOVED
          + " exists.");
    if (hasCurrent)
      return StorageState.COMPLETE_ROLLBACK;
    return StorageState.RECOVER_ROLLBACK;
  }

  /**
   * Complete or recover storage state from previously failed transition.
   *
   * @param curState specifies what/how the state should be recovered
   * @throws IOException
   */
  public void doRecover(StorageState curState) throws IOException {
    File curDir = getCurrentDir();
    String rootPath = root.getCanonicalPath();
    switch(curState) {
    case COMPLETE_UPGRADE: // mv previous.tmp -> previous
      LOG.info("Completing previous upgrade for storage directory "
          + rootPath);
      rename(getPreviousTmp(), getPreviousDir());
      return;
    case RECOVER_UPGRADE: // mv previous.tmp -> current
      LOG.info("Recovering storage directory " + rootPath
          + " from previous upgrade");
      if (curDir.exists())
        deleteDir(curDir);
      rename(getPreviousTmp(), curDir);
      return;
    case COMPLETE_ROLLBACK: // rm removed.tmp
      LOG.info("Completing previous rollback for storage directory "
          + rootPath);
      deleteDir(getRemovedTmp());
      return;
    case RECOVER_ROLLBACK: // mv removed.tmp -> current
      LOG.info("Recovering storage directory " + rootPath
          + " from previous rollback");
      rename(getRemovedTmp(), curDir);
      return;
    case COMPLETE_FINALIZE: // rm finalized.tmp
      LOG.info("Completing previous finalize for storage directory "
          + rootPath);
      deleteDir(getFinalizedTmp());
      return;
    case COMPLETE_CHECKPOINT: // mv lastcheckpoint.tmp -> previous.checkpoint
      LOG.info("Completing previous checkpoint for storage directory "
          + rootPath);
      File prevCkptDir = getPreviousCheckpoint();
      if (prevCkptDir.exists())
        deleteDir(prevCkptDir);
      rename(getLastCheckpointTmp(), prevCkptDir);
      return;
    case RECOVER_CHECKPOINT: // mv lastcheckpoint.tmp -> current
      LOG.info("Recovering storage directory " + rootPath
          + " from failed checkpoint");
      if (curDir.exists())
        deleteDir(curDir);
      rename(getLastCheckpointTmp(), curDir);
      return;
    default:
      throw new IOException("Unexpected FS state: " + curState);
    }
  }

  /**
   * @return true if the storage directory should prompt the user prior
   * to formatting (i.e if the directory appears to contain some data)
   * @throws IOException if the SD cannot be accessed due to an IO error
   */
  @Override
  public boolean hasSomeData() throws IOException {
    // Its alright for a dir not to exist, or to exist (properly accessible)
    // and be completely empty.
    if (!root.exists()) return false;
    if (!root.isDirectory()) {
      // a file where you expect a directory should not cause silent
      // formatting
      return true;
    }
    if (FileUtil.listFiles(root).length == 0) {
      // Empty dir can format without prompt.
      return false;
    }
    return true;
  }

  /** @return whether this dir is shared (between NNs or block pools). */
  public boolean isShared() {
    return isShared;
  }

  /**
   * Lock storage to provide exclusive access.
   *
   * <p> Locking is not supported by all file systems.
   * E.g., NFS does not consistently support exclusive locks.
   *
   * <p> If locking is supported we guarantee exclusive access to the
   * storage directory. Otherwise, no guarantee is given.
   *
   * @throws IOException if locking fails
   */
  public void lock() throws IOException {
    if (isShared()) {
      // Shared dirs are never locked: another NN may legitimately use them.
      LOG.info("Locking is disabled");
      return;
    }
    FileLock newLock = tryLock();
    if (newLock == null) {
      String msg = "Cannot lock storage " + this.root
          + ". The directory is already locked";
      LOG.info(msg);
      throw new IOException(msg);
    }
    // Don't overwrite lock until success - this way if we accidentally
    // call lock twice, the internal state won't be cleared by the second
    // (failed) lock attempt
    lock = newLock;
  }

  /**
   * Attempts to acquire an exclusive lock on the storage.
   *
   * @return A lock object representing the newly-acquired lock or
   * <code>null</code> if storage is already locked.
   * @throws IOException if locking fails.
   */
  @SuppressWarnings("resource")
  FileLock tryLock() throws IOException {
    boolean deletionHookAdded = false;
    File lockF = new File(root, STORAGE_FILE_LOCK);
    if (!lockF.exists()) {
      // Only register the cleanup hook when we created the lock file;
      // if we failed to lock an existing file, it belongs to someone else.
      lockF.deleteOnExit();
      deletionHookAdded = true;
    }
    RandomAccessFile file = new RandomAccessFile(lockF, "rws");
    String jvmName = ManagementFactory.getRuntimeMXBean().getName();
    FileLock res = null;
    try {
      res = file.getChannel().tryLock();
      if (null == res) {
        throw new OverlappingFileLockException();
      }
      // Record our JVM name in the lock file for diagnostics.
      file.write(jvmName.getBytes(Charsets.UTF_8));
      LOG.info("Lock on " + lockF + " acquired by nodename " + jvmName);
    } catch(OverlappingFileLockException oe) {
      // Cannot read from the locked file on Windows.
      String lockingJvmName = Path.WINDOWS ? "" : (" " + file.readLine());
      LOG.error("It appears that another namenode" + lockingJvmName
          + " has already locked the storage directory");
      file.close();
      return null;
    } catch(IOException e) {
      LOG.error("Failed to acquire lock on " + lockF + ". If this storage directory is mounted via NFS, "
          + "ensure that the appropriate nfs lock services are running.", e);
      file.close();
      throw e;
    }
    if (res != null && !deletionHookAdded) {
      // If the file existed prior to our startup, we didn't
      // call deleteOnExit above. But since we successfully locked
      // the dir, we can take care of cleaning it up.
      lockF.deleteOnExit();
    }
    return res;
  }

  /**
   * Unlock storage.
   *
   * Releases the held lock (if any) and closes its channel; no-op when
   * the directory is not locked.
   *
   * @throws IOException
   */
  public void unlock() throws IOException {
    if (this.lock == null)
      return;
    this.lock.release();
    lock.channel().close();
    lock = null;
  }

  @Override
  public String toString() {
    return "Storage Directory " + this.root;
  }

  /**
   * Check whether underlying file system supports file locking.
   *
   * Strategy: acquire (or reuse) one lock, then attempt a second one. If
   * the second attempt fails while the first is held, exclusive locks are
   * being enforced; if both succeed, the file system is not enforcing
   * exclusivity.
   *
   * @return <code>true</code> if exclusive locks are supported or
   * <code>false</code> otherwise.
   * @throws IOException
   * @see StorageDirectory#lock()
   */
  public boolean isLockSupported() throws IOException {
    FileLock firstLock = null;
    FileLock secondLock = null;
    try {
      firstLock = lock;
      if(firstLock == null) {
        firstLock = tryLock();
        if(firstLock == null)
          return true;
      }
      secondLock = tryLock();
      if(secondLock == null)
        return true;
    } finally {
      // Release only locks we acquired here; never drop the dir's own lock.
      if(firstLock != null && firstLock != lock) {
        firstLock.release();
        firstLock.channel().close();
      }
      if(secondLock != null) {
        secondLock.release();
        secondLock.channel().close();
      }
    }
    return false;
  }
}
/**
 * Create empty storage info of the specified type; no storage
 * directories are registered yet.
 *
 * @param type the node type this storage belongs to
 */
protected Storage(NodeType type) {
  super(type);
}
/**
 * Create storage seeded from an existing {@link StorageInfo}; field
 * copying is delegated to the superclass constructor.
 *
 * @param storageInfo the storage info to copy from
 */
protected Storage(StorageInfo storageInfo) {
  super(storageInfo);
}
/** @return the number of configured storage directories. */
public int getNumStorageDirs() {
  return storageDirs.size();
}
/**
 * @param idx index into the configured storage directory list
 * @return the storage directory at position {@code idx}
 */
public StorageDirectory getStorageDir(int idx) {
  return storageDirs.get(idx);
}
/**
 * Convenience accessor for storages configured with a single directory.
 *
 * @return the storage directory, with the precondition that this storage
 *         has exactly one storage directory
 * @throws IllegalStateException if there is not exactly one directory
 */
public StorageDirectory getSingularStorageDir() {
  final int dirCount = storageDirs.size();
  Preconditions.checkState(dirCount == 1);
  return storageDirs.get(0);
}
/** Register an additional storage directory with this storage. */
protected void addStorageDir(StorageDirectory sd) {
  storageDirs.add(sd);
}
/**
 * Return true if the layout of the given storage directory is from a version
 * of Hadoop prior to the introduction of the "current" and "previous"
 * directories which allow upgrade and rollback.
 *
 * @param sd the storage directory whose layout is inspected
 * @return true if the layout predates the current/previous scheme
 * @throws IOException if the directory cannot be inspected
 */
public abstract boolean isPreUpgradableLayout(StorageDirectory sd)
throws IOException;
/**
 * Check if the given storage directory comes from a version of Hadoop
 * prior to when the directory layout changed (ie 0.13). If this is
 * the case, this method throws an IOException.
 *
 * Delegates the version decision to checkVersionUpgradable(0); a 0
 * layout version is treated as "too old" there.
 */
private void checkOldLayoutStorage(StorageDirectory sd) throws IOException {
  if (isPreUpgradableLayout(sd)) {
    checkVersionUpgradable(0);
  }
}
/**
 * Checks if the upgrade from {@code oldVersion} is supported.
 *
 * An upgrade is rejected when {@code oldVersion} is numerically greater
 * than LAST_UPGRADABLE_LAYOUT_VERSION (layout versions presumably count
 * downwards -- TODO confirm against HdfsServerConstants).
 *
 * @param oldVersion the version of the metadata to check with the current
 *        version
 * @throws IOException if upgrade is not supported
 */
public static void checkVersionUpgradable(int oldVersion)
    throws IOException {
  if (oldVersion > LAST_UPGRADABLE_LAYOUT_VERSION) {
    // Fixed: the original concatenation produced a doubled space in
    // "from this  older version".
    String msg = "*********** Upgrade is not supported from this" +
        " older version " + oldVersion +
        " of storage to the current version." +
        " Please upgrade to " + LAST_UPGRADABLE_HADOOP_VERSION +
        " or a later version and then upgrade to current" +
        " version. Old layout version is " +
        (oldVersion == 0 ? "'too old'" : (""+oldVersion)) +
        " and latest layout version this software version can" +
        " upgrade from is " + LAST_UPGRADABLE_LAYOUT_VERSION +
        ". ************";
    LOG.error(msg);
    throw new IOException(msg);
  }
}
/**
 * Iterate over each of the {@link FormatConfirmable} objects,
 * potentially checking with the user whether it should be formatted.
 *
 * If running in interactive mode, will prompt the user for each
 * directory to allow them to format anyway. Otherwise, returns
 * false, unless 'force' is specified.
 *
 * @param force format regardless of whether dirs exist
 * @param interactive prompt the user when a dir exists
 * @return true if formatting should proceed
 * @throws IOException if some storage cannot be accessed
 */
public static boolean confirmFormat(
    Iterable<? extends FormatConfirmable> items,
    boolean force, boolean interactive) throws IOException {
  boolean proceed = true;
  for (FormatConfirmable item : items) {
    // Empty or nonexistent storage never needs confirmation.
    if (!item.hasSomeData()) {
      continue;
    }
    if (force) { // Don't confirm, always format.
      System.err.println(
          "Data exists in " + item + ". Formatting anyway.");
      continue;
    }
    if (!interactive) { // Don't ask - always don't format
      System.err.println(
          "Running in non-interactive mode, and data appears to exist in " +
          item + ". Not formatting.");
      proceed = false;
      break;
    }
    if (!ToolRunner.confirmPrompt("Re-format filesystem in " + item + " ?")) {
      System.err.println("Format aborted in " + item);
      proceed = false;
      break;
    }
  }
  return proceed;
}
/**
 * Interface for classes which need to have the user confirm their
 * formatting during NameNode -format and other similar operations.
 *
 * This is currently a storage directory or journal manager.
 */
@InterfaceAudience.Private
public interface FormatConfirmable {
  /**
   * @return true if the storage seems to have some valid data in it,
   * and the user should be required to confirm the format. Otherwise,
   * false.
   * @throws IOException if the storage cannot be accessed at all.
   */
  public boolean hasSomeData() throws IOException;

  /**
   * @return a string representation of the formattable item, suitable
   * for display to the user inside a prompt
   */
  @Override
  public String toString();
}
/**
 * Set common storage fields into the given properties object.
 * Should be overloaded if additional fields need to be set.
 *
 * @param props the Properties object to write into
 * @param sd the storage directory the properties are being written for
 * @throws IOException declared for subclasses; not thrown here
 */
protected void setPropertiesFromFields(Properties props,
    StorageDirectory sd)
    throws IOException {
  props.setProperty("layoutVersion", String.valueOf(layoutVersion));
  props.setProperty("storageType", storageType.toString());
  props.setProperty("namespaceID", String.valueOf(namespaceID));
  // Set clusterID in version with federation support
  if (versionSupportsFederation(getServiceLayoutFeatureMap())) {
    props.setProperty("clusterID", clusterID);
  }
  props.setProperty("cTime", String.valueOf(cTime));
}
/**
 * Write properties to the VERSION file in the given storage directory.
 *
 * @param sd the storage directory whose VERSION file is written
 * @throws IOException if the file cannot be written
 */
public void writeProperties(StorageDirectory sd) throws IOException {
  writeProperties(sd.getVersionFile(), sd);
}
/**
 * Collect this storage's fields into a fresh Properties object and write
 * them to the given file.
 *
 * @param to the target file (typically a VERSION file)
 * @param sd the storage directory being written
 * @throws IOException if field collection or the write fails
 */
public void writeProperties(File to, StorageDirectory sd) throws IOException {
  Properties props = new Properties();
  setPropertiesFromFields(props, sd);
  writeProperties(to, sd, props);
}
/**
 * Write the given properties to {@code to}, overwriting from offset 0 and
 * truncating any leftover bytes from a previous, longer version of the
 * file. Mode "rws" makes content and metadata updates synchronous, so a
 * crash mid-write leaves either the old contents or a prefix of the new
 * ones (see the inline comments on interruption safety).
 *
 * @param to the file to (re)write
 * @param sd the storage directory being written (not used here; kept for
 *           signature compatibility with callers/overrides)
 * @param props the properties to store
 * @throws IOException if the write fails
 */
public static void writeProperties(File to, StorageDirectory sd,
    Properties props) throws IOException {
  RandomAccessFile file = new RandomAccessFile(to, "rws");
  FileOutputStream out = null;
  try {
    file.seek(0);
    // The stream shares the RandomAccessFile's descriptor, so both views
    // operate on the same open file.
    out = new FileOutputStream(file.getFD());
    /*
     * If server is interrupted before this line,
     * the version file will remain unchanged.
     */
    props.store(out, null);
    /*
     * Now the new fields are flushed to the head of the file, but file
     * length can still be larger then required and therefore the file can
     * contain whole or corrupted fields from its old contents in the end.
     * If server is interrupted here and restarted later these extra fields
     * either should not effect server behavior or should be handled
     * by the server correctly.
     */
    file.setLength(out.getChannel().position());
  } finally {
    if (out != null) {
      out.close();
    }
    file.close();
  }
}
/**
 * Move {@code from} to {@code to} via File.renameTo, raising an
 * IOException with both canonical paths when the rename is refused.
 *
 * @param from source path
 * @param to destination path
 * @throws IOException if the underlying rename fails
 */
public static void rename(File from, File to) throws IOException {
  boolean renamed = from.renameTo(to);
  if (renamed) {
    return;
  }
  throw new IOException("Failed to rename "
      + from.getCanonicalPath() + " to " + to.getCanonicalPath());
}
/**
 * Recursively delete all the content of the directory first and then
 * the directory itself from the local filesystem.
 * @param dir The directory to delete
 * @throws IOException if FileUtil.fullyDelete reports failure
 */
public static void deleteDir(File dir) throws IOException {
  if (!FileUtil.fullyDelete(dir))
    throw new IOException("Failed to delete " + dir.getCanonicalPath());
}
/**
 * Write all data storage files: refresh this.layoutVersion from the
 * service layout version, then persist the properties of every
 * configured storage directory.
 * @throws IOException if writing any directory fails
 */
public void writeAll() throws IOException {
  this.layoutVersion = getServiceLayoutVersion();
  for (StorageDirectory sd : storageDirs) {
    writeProperties(sd);
  }
}
/**
 * Unlock all storage directories by releasing each directory's lock in
 * turn.
 * @throws IOException if releasing any lock fails
 */
public void unlockAll() throws IOException {
  for (StorageDirectory sd : storageDirs) {
    sd.unlock();
  }
}
/** @return the source revision string reported by VersionInfo. */
public static String getBuildVersion() {
  return VersionInfo.getRevision();
}
/**
 * Build the registration id for the given storage, in the form
 * "NS-&lt;namespaceID&gt;-&lt;clusterID&gt;-&lt;cTime&gt;".
 */
public static String getRegistrationID(StorageInfo storage) {
  StringBuilder id = new StringBuilder("NS-");
  id.append(storage.getNamespaceID());
  id.append('-').append(storage.getClusterID());
  id.append('-').append(storage.getCTime());
  return id.toString();
}
/**
 * @return whether {@code layoutVersion} is one of the 0.20.203-series
 * layout versions listed in LAYOUT_VERSIONS_203.
 */
public static boolean is203LayoutVersion(int layoutVersion) {
  boolean matches = false;
  for (int candidate : LAYOUT_VERSIONS_203) {
    if (candidate == layoutVersion) {
      matches = true;
      break;
    }
  }
  return matches;
}
}
|
apache/openjpa | 35,855 | openjpa-kernel/src/main/java/org/apache/openjpa/event/TCPRemoteCommitProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.event;
import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.pool2.BasePooledObjectFactory;
import org.apache.commons.pool2.PooledObject;
import org.apache.commons.pool2.impl.DefaultPooledObject;
import org.apache.commons.pool2.impl.GenericObjectPool;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.apache.openjpa.lib.conf.Configurable;
import org.apache.openjpa.lib.log.Log;
import org.apache.openjpa.lib.util.J2DoPrivHelper;
import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.lib.util.StringUtil;
import org.apache.openjpa.util.GeneralException;
import org.apache.openjpa.util.InternalException;
import org.apache.openjpa.util.Serialization;
/**
* TCP-based implementation of {@link RemoteCommitProvider} that
* listens for object modifications and propagates those changes to
* other RemoteCommitProviders over TCP sockets.
*
* @author Brian Leair
* @author Patrick Linskey
* @since 0.2.5.0
*/
public class TCPRemoteCommitProvider
extends AbstractRemoteCommitProvider
implements Configurable {
// Port used when an address or this provider does not specify one.
private static final int DEFAULT_PORT = 5636;

protected static final Localizer s_loc = Localizer.forPackage(TCPRemoteCommitProvider.class);

// Seed for per-instance ids; incremented under the class lock in the
// constructor so each provider created in this JVM gets a distinct id.
private static long s_idSequence = System.currentTimeMillis();

// A map of listen ports to listeners in this JVM. We might
// want to look into allowing same port, different interface --
// that is not currently possible in a single JVM.
private static final Map<String, TCPPortListener> s_portListenerMap = new HashMap<>();

private final long _id; // unique id of this provider within the JVM
private final byte[] _localhost; // cached local IP address bytes
protected int _port = DEFAULT_PORT; // port this provider listens on
private int _maxTotal = 2; // max sockets simultaneously open per peer
private int _maxIdle = 2; // idle sockets kept open per peer
// wait before retrying a peer that became unreachable (milliseconds)
private int _recoveryTimeMillis = 15000;
private TCPPortListener _listener; // shared per-port listener (from map)
private final BroadcastQueue _broadcastQueue = new BroadcastQueue();
// worker threads draining _broadcastQueue; resized by
// setNumBroadcastThreads, hence the synchronized list
private final List<BroadcastWorkerThread> _broadcastThreads = Collections.synchronizedList(new LinkedList<>());
// peers this provider sends events to; guarded by _addressesLock
protected List<HostAddress> _addresses = new ArrayList<>();
protected final ReentrantLock _addressesLock;
/**
 * Creates a provider with a unique JVM-local id, the cached local IP
 * address, and two broadcast worker threads by default.
 *
 * @throws UnknownHostException if the local host address cannot be resolved
 */
public TCPRemoteCommitProvider() throws UnknownHostException {
  // obtain a unique ID.
  synchronized (TCPRemoteCommitProvider.class) {
    _id = s_idSequence++;
  }
  // cache the local IP address.
  _localhost = InetAddress.getLocalHost().getAddress();
  _addressesLock = new ReentrantLock();
  setNumBroadcastThreads(2);
}
/**
 * @return the port that this provider should listen on.
 * @see #setPort(int)
 */
public int getPort() {
  return _port;
}
/**
 * Set the port that this provider should listen on. Set once only.
 *
 * @param port the port that this provider should listen on
 */
public void setPort(final int port) {
  _port = port;
}
/**
 * Set the number of milliseconds to wait before retrying to reconnect to a peer after it becomes unreachable.
 *
 * @param recoverytime the number of milliseconds to wait before retrying to reconnect to a peer after it becomes
 * unreachable
 * @see #getRecoveryTimeMillis()
 */
public void setRecoveryTimeMillis(final int recoverytime) {
  _recoveryTimeMillis = recoverytime;
}
/**
 * @return the number of milliseconds to wait before retrying to reconnect to a peer after it becomes unreachable.
 * @see #setRecoveryTimeMillis(int)
 */
public int getRecoveryTimeMillis() {
  return _recoveryTimeMillis;
}
/**
 * Set the maximum number of sockets that this provider can simultaneously open to each peer in the cluster.
 *
 * @param maxActive the maximum total number of sockets that this provider can simultaneously open to each peer in
 * the cluster.
 * @deprecated please use {@link TCPRemoteCommitProvider#setMaxTotal(int)} instead
 */
@Deprecated
public void setMaxActive(final int maxActive) {
  log.warn("This method should not be used");
  _maxTotal = maxActive;
}
/**
 * Set the maximum total number of sockets that this provider can simultaneously open to each peer in the cluster.
 *
 * @param maxTotal the maximum total number of sockets that this provider can simultaneously open to each peer in
 * the cluster.
 * @see #getMaxTotal()
 */
public void setMaxTotal(final int maxTotal) {
  _maxTotal = maxTotal;
}
/**
 * @return the maximum number of sockets that this provider can simultaneously open to each peer in the cluster.
 * @see #setMaxTotal(int)
 */
public int getMaxTotal() {
  return _maxTotal;
}
/**
 * Set the number of idle sockets that this provider can keep open to each peer in the cluster.
 *
 * @param maxIdle the number of idle sockets that this provider can keep open to each peer in the cluster
 * @see #getMaxIdle()
 */
public void setMaxIdle(final int maxIdle) {
  _maxIdle = maxIdle;
}
/**
 * @return the number of idle sockets that this provider can keep open to each peer in the cluster.
 * @see #setMaxIdle(int)
 */
public int getMaxIdle() {
  return _maxIdle;
}
/**
 * Set the number of worker threads that are used for transmitting packets to peers in the cluster.
 *
 * Shrinking only flags the oldest extra workers as not running -- they do
 * not terminate until they process one more packet. Growing starts new
 * daemon worker threads immediately.
 *
 * @param numBroadcastThreads the number of worker threads that are used for transmitting packets to peers in the
 * cluster
 */
public void setNumBroadcastThreads(final int numBroadcastThreads) {
  synchronized (_broadcastThreads) {
    int cur = _broadcastThreads.size();
    if (cur > numBroadcastThreads) {
      // Notify the extra worker threads so they stop themselves
      // Threads will not end until they send another pk.
      for (int i = numBroadcastThreads; i < cur; i++) {
        // always remove index 0: retire the longest-lived workers first
        BroadcastWorkerThread worker = _broadcastThreads.remove(0);
        worker.setRunning(false);
      }
    } else if (cur < numBroadcastThreads) {
      // Create additional worker threads
      for (int i = cur; i < numBroadcastThreads; i++) {
        BroadcastWorkerThread wt = new BroadcastWorkerThread();
        wt.setDaemon(true);
        wt.start();
        _broadcastThreads.add(wt);
      }
    }
  }
}
/**
 * @return the number of worker threads that are used for transmitting packets to peers in the cluster.
 * @see #setNumBroadcastThreads(int)
 */
public int getNumBroadcastThreads() {
  return _broadcastThreads.size();
}
/**
 * Sets the list of addresses of peers to which this provider will send events to.
 * The peers are semicolon-separated <code>names</code> list in the form of "myhost1:portA;myhost2:portB".
 *
 * Any previously configured addresses are closed and replaced wholesale.
 * An entry whose hostname string equals the local hostname is skipped so
 * the provider does not message itself.
 *
 * @param names the list of addresses of peers to which this provider will send events to
 * @throws UnknownHostException in case peer name cannot be resolved
 */
public void setAddresses(final String names) throws UnknownHostException {
  // NYI. Could look for equivalence of addresses and avoid changing those that didn't change.
  _addressesLock.lock();
  try {
    // Release sockets held for the old peer set before rebuilding it.
    _addresses.forEach(HostAddress::close);
    String[] toks = StringUtil.split(names, ";", 0);
    _addresses = new ArrayList<>(toks.length);
    InetAddress localhost = InetAddress.getLocalHost();
    String localhostName = localhost.getHostName();
    for (String host : toks) {
      String hostname;
      int tmpPort;
      int colon = host.indexOf(':');
      if (colon != -1) {
        hostname = host.substring(0, colon);
        tmpPort = Integer.parseInt(host.substring(colon + 1));
      } else {
        hostname = host;
        tmpPort = DEFAULT_PORT;
      }
      InetAddress tmpAddress = AccessController.doPrivileged(J2DoPrivHelper.getByNameAction(hostname));
      // bleair: For each address we would rather make use of
      // the jdk1.4 isLinkLocalAddress () || isLoopbackAddress ().
      // (Though in practice on win32 they don't work anyways!)
      // Instead we will check hostname. Not perfect, but
      // it will match often enough (people will typically
      // use the DNS machine names and be cutting/pasting.)
      if (localhostName.equals(hostname)) {
        // This string matches the hostname for ourselves, we
        // don't actually need to send ourselves messages.
        if (log.isTraceEnabled()) {
          log.trace(s_loc.get("tcp-address-asself", tmpAddress.getHostName() + ":" + tmpPort));
        }
      } else {
        HostAddress newAddress = new HostAddress(host);
        _addresses.add(newAddress);
        if (log.isTraceEnabled()) {
          log.trace(s_loc.get("tcp-address-set",
              newAddress._address.getHostName() + ":" + newAddress._port));
        }
      }
    }
  } catch (PrivilegedActionException pae) {
    throw (UnknownHostException) pae.getException();
  } finally {
    _addressesLock.unlock();
  }
}
    // ---------- Configurable implementation ----------
    /**
     * Subclasses that need to perform actions in
     * {@link Configurable#endConfiguration} must invoke this method.
     * Looks up (or creates) the JVM-wide port listener for {@code _port},
     * registers this provider with it, and pushes the configured socket-pool
     * limits to every peer address.
     */
    @Override
    public void endConfiguration() {
        super.endConfiguration();
        synchronized (s_portListenerMap) {
            // see if a listener exists for this port.
            _listener = s_portListenerMap.get(String.valueOf(_port));
            // NOTE(review): the second clause only recreates a dead listener when
            // its recorded port equals _port; a stopped listener cached under this
            // key with a different port falls through to the InternalException
            // below — confirm this is the intended behavior.
            if (_listener == null || (!_listener.isRunning() && _listener._port == _port)) {
                try {
                    _listener = new TCPPortListener(_port, log);
                    _listener.listen();
                    s_portListenerMap.put(String.valueOf(_port), _listener);
                } catch (Exception e) {
                    // Failure to bind the listen socket is fatal to configuration.
                    throw new GeneralException(s_loc.get("tcp-init-exception", String.valueOf(_port)), e).
                        setFatal(true);
                }
            } else if (_listener.isRunning()) {
                if (_listener._port != _port) {
                    // this really shouldn't be able to happen.
                    throw new GeneralException(s_loc.get("tcp-not-equal", String.valueOf(_port))).setFatal(true);
                }
            } else {
                throw new InternalException(s_loc.get("tcp-listener-broken"));
            }
            _listener.addProvider(this);
        }
        _addressesLock.lock();
        try {
            // Propagate the configured pool limits to every peer's socket pool.
            _addresses.forEach(curAddress -> {
                curAddress.setMaxTotal(_maxTotal);
                curAddress.setMaxIdle(_maxIdle);
            });
        } finally {
            _addressesLock.unlock();
        }
    }
// ---------- RemoteCommitProvider implementation ----------
// pre 3.3.4 = <no version number transmitted>
// 3.3 Preview = 0x1428acfd;
// 3.4 = 0x1428acff;
private static final long PROTOCOL_VERSION = 0x1428acff;
@Override
public void broadcast(final RemoteCommitEvent event) {
// build a packet notifying other JVMs of object changes.
try (ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos)) {
oos.writeLong(PROTOCOL_VERSION);
oos.writeLong(_id);
oos.writeInt(_port);
oos.writeObject(_localhost);
oos.writeObject(event);
oos.flush();
byte[] bytes = baos.toByteArray();
baos.close();
if (_broadcastThreads.isEmpty()) {
sendUpdatePacket(bytes);
} else {
_broadcastQueue.addPacket(bytes);
}
} catch (IOException ioe) {
if (log.isWarnEnabled()) {
log.warn(s_loc.get("tcp-payload-create-error"), ioe);
}
}
}
/**
* Sends a change notification packet to other machines in this
* provider cluster.
*/
private void sendUpdatePacket(final byte[] bytes) {
_addressesLock.lock();
try {
_addresses.forEach(address -> address.sendUpdatePacket(bytes));
} finally {
_addressesLock.unlock();
}
}
@Override
public void close() {
if (_listener != null) {
_listener.removeProvider(this);
}
// Remove Broadcast Threads then close sockets.
_broadcastQueue.close();
// Wait for _broadcastThreads to get cleaned up.
while(!_broadcastThreads.isEmpty()) {
try {
Thread.sleep(500);
} catch (InterruptedException ie) {
// Ignore.
}
}
_addressesLock.lock();
try {
_addresses.forEach(HostAddress::close);
} finally {
_addressesLock.unlock();
}
}
/**
* Utility class to hold messages to be sent. This
* allows calls to broadcast () to return without
* waiting for the send to complete.
*/
private static class BroadcastQueue {
private final LinkedList<byte[]> _packetQueue = new LinkedList<>();
private boolean _closed = false;
public synchronized void close() {
_closed = true;
notifyAll();
}
public synchronized boolean isClosed() {
return _closed;
}
public synchronized void addPacket(final byte[] bytes) {
_packetQueue.addLast(bytes);
notify();
}
/**
* @return the bytes defining the packet to process, or
* <code>null</code> if the queue is empty.
*/
public synchronized byte[] removePacket() throws InterruptedException {
// only wait if the queue is still open. This allows processing
// of events in the queue to continue, while avoiding sleeping
// during shutdown.
while (!_closed && _packetQueue.isEmpty()) {
wait();
}
if (_packetQueue.isEmpty()) {
return null;
} else {
return _packetQueue.removeFirst();
}
}
}
/**
* Threads to broadcast packets placed in the {@link BroadcastQueue}.
*/
private class BroadcastWorkerThread
extends Thread {
private boolean _keepRunning = true;
@Override
public void run() {
while (_keepRunning) {
try {
// This will block until there is a packet to send, or
// until the queue is closed.
byte[] bytes = _broadcastQueue.removePacket();
if (bytes != null) {
sendUpdatePacket(bytes);
} else if (_broadcastQueue.isClosed()) {
_keepRunning = false;
}
} catch (InterruptedException e) {
// End the thread.
break;
}
}
remove();
}
public void setRunning(final boolean keepRunning) {
_keepRunning = keepRunning;
}
private void remove() {
_broadcastThreads.remove(this);
}
}
/**
* Responsible for listening for incoming packets and processing them.
*/
private static final class TCPPortListener implements Runnable {
private final Log _log;
private ServerSocket _receiveSocket;
private Thread _acceptThread;
private Set<Thread> _receiverThreads = new HashSet<>();
private final Set<TCPRemoteCommitProvider> _providers = new HashSet<>();
/**
* Cache the local IP address
*/
private final byte[] _localhost;
/**
* The port that this listener should listen on. Configured
* by TCPRemoteCommitProvider.
*/
private int _port;
/**
* Should be set to <code>true</code> once the listener is listening.
*/
private boolean _isRunning = false;
/**
* Construct a new TCPPortListener configured to use the specified port.
*/
private TCPPortListener(final int port, final Log log) throws IOException {
_port = port;
_log = log;
try {
_receiveSocket = AccessController.doPrivileged(J2DoPrivHelper.newServerSocketAction(_port));
} catch (PrivilegedActionException pae) {
throw (IOException) pae.getException();
}
_localhost = InetAddress.getLocalHost().getAddress();
if (_log.isTraceEnabled()) {
_log.info(s_loc.get("tcp-start-listener", String.valueOf(_port)));
}
}
private void listen() {
_acceptThread = new Thread(this);
_acceptThread.setDaemon(true);
_acceptThread.start();
}
/**
* All providers added here will be notified of any incoming provider messages. There will be one of these per
* BrokerFactory in a given JVM.
* {@link TCPRemoteCommitProvider#endConfiguration} invokes <code>addProvider</code> with <code>this</code> upon
* completion of configuration.
*/
private void addProvider(final TCPRemoteCommitProvider provider) {
synchronized (_providers) {
_providers.add(provider);
}
}
/**
* Remove a provider from the list of providers to notify of commit events.
*/
private synchronized void removeProvider(final TCPRemoteCommitProvider provider) {
synchronized (_providers) {
_providers.remove(provider);
// if the provider list is empty, shut down the thread.
if (_providers.isEmpty()) {
_isRunning = false;
try {
_receiveSocket.close();
} catch (IOException ioe) {
if (_log.isWarnEnabled()) {
_log.warn(s_loc.get("tcp-close-error"), ioe);
}
}
_acceptThread.interrupt();
}
}
}
private boolean isRunning() {
synchronized (_providers) {
return _isRunning;
}
}
@Override
public void run() {
synchronized (_providers) {
_isRunning = true;
}
Socket s = null;
while (_isRunning) {
try {
s = null;
// Block, waiting to accept new connection from a peer
s = AccessController.doPrivileged(J2DoPrivHelper.acceptAction(_receiveSocket));
if (_log.isTraceEnabled()) {
_log.trace(s_loc.get("tcp-received-connection",
s.getInetAddress().getHostAddress() + ":" + s.getPort()));
}
ReceiveSocketHandler sh = new ReceiveSocketHandler(s);
Thread receiverThread = new Thread(sh);
receiverThread.setDaemon(true);
receiverThread.start();
_receiverThreads.add(receiverThread);
} catch (Exception e) {
if (e instanceof PrivilegedActionException) {
e = ((PrivilegedActionException) e).getException();
}
if (!(e instanceof SocketException) || _isRunning) {
if (_log.isWarnEnabled()) {
_log.warn(s_loc.get("tcp-accept-error"), e);
}
}
// Nominal case (InterruptedException) because close ()
// calls _acceptThread.interrupt ();
try {
if (s != null) {
s.close();
}
} catch (Exception ee) {
if (_log.isWarnEnabled()) {
_log.warn(s_loc.get("tcp-close-error"), e);
}
}
}
}
// We are done listening. Interrupt any worker threads.
_receiverThreads.forEach(Thread::interrupt);
synchronized (_providers) {
try {
if (_isRunning) {
_receiveSocket.close();
}
} catch (Exception e) {
if (_log.isWarnEnabled()) {
_log.warn(s_loc.get("tcp-close-error"), e);
}
}
_isRunning = false;
if (_log.isTraceEnabled()) {
_log.trace(s_loc.get("tcp-close-listener", _port + ""));
}
}
}
/**
* Utility class that acts as a worker thread to receive Events
* from broadcasters.
*/
private final class ReceiveSocketHandler implements Runnable {
private InputStream _in;
private Socket _s;
private ReceiveSocketHandler(final Socket s) {
// We are the receiving end and we don't send any messages
// back to the broadcaster. Turn off Nagle's so that
// we will send ack packets without waiting.
_s = s;
try {
_s.setTcpNoDelay(true);
_in = new BufferedInputStream(s.getInputStream());
} catch (IOException ioe) {
if (_log.isInfoEnabled()) {
_log.info(s_loc.get("tcp-socket-option-error"), ioe);
}
_s = null;
} catch (Exception e) {
if (_log.isWarnEnabled()) {
_log.warn(s_loc.get("tcp-receive-error"), e);
}
_s = null;
}
}
@Override
public void run() {
if (_s == null) {
return;
}
while (_isRunning && _s != null) {
try {
// This will block our thread, waiting to read
// the next Event-object-message.
handle(_in);
} catch (EOFException eof) {
// EOFException raised when peer is properly
// closing its end.
if (_log.isTraceEnabled()) {
_log.trace(s_loc.get("tcp-close-socket",
_s.getInetAddress().getHostAddress() + ":" + _s.getPort()));
}
break;
} catch (Throwable e) {
if (_log.isWarnEnabled()) {
_log.warn(s_loc.get("tcp-receive-error"), e);
}
break;
}
}
// We are done receiving on this socket and this worker
// thread is terminating.
try {
_in.close();
if (_s != null) {
_s.close();
}
} catch (IOException e) {
_log.warn(s_loc.get("tcp-close-socket-error",
_s.getInetAddress().getHostAddress() + ":" + _s.getPort()), e);
}
}
/**
* Process an {@link InputStream} containing objects written
* by {@link TCPRemoteCommitProvider#broadcast(RemoteCommitEvent)}.
*/
private void handle(final InputStream in) throws IOException, ClassNotFoundException {
// This will block waiting for the next
ObjectInputStream ois = new Serialization.ClassResolvingObjectInputStream(in);
long protocolVersion = ois.readLong();
if (protocolVersion != PROTOCOL_VERSION) {
if (_log.isWarnEnabled()) {
_log.warn(s_loc.get("tcp-wrong-version-error",
_s.getInetAddress().getHostAddress() + ":" + _s.getPort()));
return;
}
}
long senderId = ois.readLong();
int senderPort = ois.readInt();
byte[] senderAddress = (byte[]) ois.readObject();
RemoteCommitEvent rce = (RemoteCommitEvent) ois.readObject();
if (_log.isTraceEnabled()) {
_log.trace(s_loc.get("tcp-received-event",
_s.getInetAddress().getHostAddress() + ":"
+ _s.getPort()));
}
boolean fromSelf = senderPort == _port && Arrays.equals(senderAddress, _localhost);
synchronized (_providers) {
// bleair: We're iterating, but currenlty there can really
// only be a single provider.
_providers.stream().filter(provider -> senderId != provider._id || !fromSelf).
forEach(provider -> provider.eventManager.fireEvent(rce));
}
}
}
}
    /**
     * Utility class to store an InetAddress and an int. Not using
     * InetSocketAddress because it's a JDK1.4 API. This also
     * provides a wrapper around the socket(s) associated with this address.
     * Tracks peer availability so that a down peer is only retried (and
     * logged) periodically rather than on every broadcast.
     */
    protected class HostAddress {
        protected InetAddress _address;
        protected int _port;
        // NOTE(review): _timeLastError, _isAvailable and _infosIssued are read
        // and written from whichever threads call sendUpdatePacket() without
        // synchronization; worst case appears to be a duplicate or missed log
        // entry — confirm that is acceptable.
        protected long _timeLastError; // millis
        protected boolean _isAvailable; // is peer thought to be up
        protected int _infosIssued = 0; // limit log entries
        protected final GenericObjectPool<Socket> _socketPool; // reusable open sockets
        /**
         * Construct a new host address from a string of the form "host:port" or of the form "host".
         * @param host host name
         * @throws UnknownHostException if the host name cannot be resolved
         */
        public HostAddress(final String host) throws UnknownHostException {
            int colon = host.indexOf(':');
            try {
                if (colon != -1) {
                    _address = AccessController
                        .doPrivileged(J2DoPrivHelper.getByNameAction(host.substring(0, colon)));
                    _port = Integer.parseInt(host.substring(colon + 1));
                } else {
                    _address = AccessController.doPrivileged(J2DoPrivHelper.getByNameAction(host));
                    _port = DEFAULT_PORT;
                }
            } catch (PrivilegedActionException pae) {
                // getByNameAction can only fail with UnknownHostException.
                throw (UnknownHostException) pae.getException();
            }
            GenericObjectPoolConfig<Socket> cfg = new GenericObjectPoolConfig<>();
            cfg.setMaxTotal(_maxTotal);
            cfg.setBlockWhenExhausted(true);
            cfg.setMaxWaitMillis(-1L);
            // -1 max wait == as long as it takes
            _socketPool = new GenericObjectPool<>(new SocketPoolableObjectFactory(), cfg);
            _isAvailable = true;
        }
        protected void setMaxTotal(final int maxTotal) {
            _socketPool.setMaxTotal(maxTotal);
        }
        protected void setMaxIdle(final int maxIdle) {
            _socketPool.setMaxIdle(maxIdle);
        }
        public InetAddress getAddress() {
            return _address;
        }
        public int getPort() {
            return _port;
        }
        public void close() {
            // Close the pool of sockets to this peer. This
            // will close all sockets in the pool.
            try {
                _socketPool.close();
            } catch (Exception e) {
                if (log.isWarnEnabled()) {
                    log.warn(s_loc.get("tcp-close-pool-error"), e);
                }
            }
        }
        /**
         * Sends the packet to this peer over a pooled socket. When the peer is
         * marked unavailable, sends are suppressed until _recoveryTimeMillis
         * has elapsed since the last failure; repeated failures after recovery
         * attempts are logged at info level at most 5 times.
         */
        protected void sendUpdatePacket(byte[] bytes) {
            if (!_isAvailable) {
                long now = System.currentTimeMillis();
                if (now - _timeLastError < _recoveryTimeMillis) {
                    // Not enough time has passed since the last error
                    return;
                }
            }
            Socket s = null;
            try {
                s = getSocket();
                OutputStream os = s.getOutputStream();
                os.write(bytes);
                os.flush();
                if (log.isTraceEnabled()) {
                    log.trace(s_loc.get("tcp-sent-update",
                        _address.getHostAddress() + ":" + _port, String.valueOf(s.getLocalPort())));
                }
                _isAvailable = true;
                _infosIssued = 0;
                // Return the socket to the pool; the socket is
                // still good.
                returnSocket(s);
            } catch (Exception e) {
                // There has been a problem sending to the peer.
                // The OS socket that was being used can no longer
                // be used.
                if (s != null) {
                    this.closeSocket(s);
                }
                // Discard every idle socket to this peer; they are likely all
                // broken if the peer went down.
                this.clearAllSockets();
                if (_isAvailable) {
                    // Log a warning, the peer was up and has now gone down
                    if (log.isWarnEnabled()) {
                        log.warn(s_loc.get("tcp-send-error", _address.getHostAddress() + ":" + _port), e);
                    }
                    _isAvailable = false;
                    // Once enough time has passed we will log another warning
                    _timeLastError = System.currentTimeMillis();
                } else {
                    long now = System.currentTimeMillis();
                    if (now - _timeLastError > _recoveryTimeMillis) {
                        if (_infosIssued < 5) {
                            // Enough time has passed, and peer is still down
                            _timeLastError = System.currentTimeMillis();
                            // We were trying to reestablish the connection,
                            // but we failed again. Log a message, but
                            // lower severity. This log will occur periodically
                            // for 5 times until the peer comes back.
                            if (log.isInfoEnabled()) {
                                log.info(s_loc.get("tcp-send-still-error", _address.getHostAddress() + ":" + _port), e);
                            }
                            _infosIssued++;
                        }
                    }
                }
            }
        }
        /** Borrows an open socket to this peer from the pool (may block). */
        protected Socket getSocket() throws Exception {
            return _socketPool.borrowObject();
        }
        /** Returns a still-usable socket to the pool. */
        protected void returnSocket(final Socket s) throws Exception {
            _socketPool.returnObject(s);
        }
        /** Discards all idle sockets currently held by the pool. */
        protected void clearAllSockets() {
            _socketPool.clear();
        }
        protected void closeSocket(final Socket s) {
            // All sockets come from the pool.
            // This socket is no longer usable, so delete it from the
            // pool.
            try {
                _socketPool.invalidateObject(s);
            } catch (Exception e) {
                // ignored: the pool may already have discarded this socket
            }
        }
        /**
         * Factory for pooled sockets.
         */
        protected class SocketPoolableObjectFactory extends BasePooledObjectFactory<Socket> {
            @Override
            public Socket create() throws Exception {
                try {
                    Socket s = AccessController.doPrivileged(J2DoPrivHelper.newSocketAction(_address, _port));
                    if (log.isTraceEnabled()) {
                        log.trace(s_loc.get("tcp-open-connection", _address + ":" + _port, "" + s.getLocalPort()));
                    }
                    return s;
                } catch (PrivilegedActionException pae) {
                    // newSocketAction can only fail with IOException.
                    throw (IOException) pae.getException();
                }
            }
            @Override
            public PooledObject<Socket> wrap(final Socket obj) {
                return new DefaultPooledObject<>(obj);
            }
            @Override
            public void destroyObject(final PooledObject<Socket> p) throws Exception {
                // try-with-resources closes the socket; the body only traces.
                try (Socket s = p.getObject()) {
                    if (log.isTraceEnabled()) {
                        log.trace(s_loc.get("tcp-close-sending-socket", _address + ":" + _port, "" + s.getLocalPort()));
                    }
                } catch (Exception e) {
                    log.warn(s_loc.get("tcp-close-socket-error", _address.getHostAddress() + ":" + _port), e);
                }
            }
        }
        // equals/hashCode are based on the resolved address and port only, so
        // two HostAddress instances for the same peer compare equal.
        @Override
        public int hashCode() {
            int hash = 7;
            hash = 37 * hash + Objects.hashCode(this._address);
            hash = 37 * hash + this._port;
            return hash;
        }
        @Override
        public boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (obj == null) {
                return false;
            }
            if (getClass() != obj.getClass()) {
                return false;
            }
            final HostAddress other = (HostAddress) obj;
            if (this._port != other._port) {
                return false;
            }
            return Objects.equals(this._address, other._address);
        }
    }
}
|
googleapis/google-cloud-java | 35,659 | java-artifact-registry/proto-google-cloud-artifact-registry-v1beta2/src/main/java/com/google/devtools/artifactregistry/v1beta2/ListTagsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1beta2/tag.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1beta2;
/**
*
*
* <pre>
* The response from listing tags.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListTagsResponse}
*/
public final class ListTagsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1beta2.ListTagsResponse)
ListTagsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListTagsResponse.newBuilder() to construct.
private ListTagsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListTagsResponse() {
tags_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListTagsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1beta2.TagProto
.internal_static_google_devtools_artifactregistry_v1beta2_ListTagsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1beta2.TagProto
.internal_static_google_devtools_artifactregistry_v1beta2_ListTagsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse.class,
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse.Builder.class);
}
public static final int TAGS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.devtools.artifactregistry.v1beta2.Tag> tags_;
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.devtools.artifactregistry.v1beta2.Tag> getTagsList() {
return tags_;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.TagOrBuilder>
getTagsOrBuilderList() {
return tags_;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
@java.lang.Override
public int getTagsCount() {
return tags_.size();
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.Tag getTags(int index) {
return tags_.get(index);
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.TagOrBuilder getTagsOrBuilder(int index) {
return tags_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of tags, or empty if there are no
   * more tags to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field was parsed as a ByteString; decode once and cache the String
      // back into the field (benign race: concurrent threads compute equal
      // values).
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The token to retrieve the next page of tags, or empty if there are no
   * more tags to return.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      // Mirror of getNextPageToken(): encode once and cache the ByteString
      // back into the field.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < tags_.size(); i++) {
      output.writeMessage(1, tags_.get(i));
    }
    // proto3: scalar fields are only serialized when non-default (non-empty).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize of -1 means "not computed yet"; the message is immutable
    // so the size can be cached after the first computation.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < tags_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, tags_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.devtools.artifactregistry.v1beta2.ListTagsResponse)) {
      return super.equals(obj);
    }
    com.google.devtools.artifactregistry.v1beta2.ListTagsResponse other =
        (com.google.devtools.artifactregistry.v1beta2.ListTagsResponse) obj;
    // Field-by-field comparison, including any unknown fields carried over
    // from parsing a newer schema.
    if (!getTagsList().equals(other.getTagsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is the "not computed" sentinel; safe to cache since the message is
    // immutable.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Repeated field only contributes when non-empty, keeping hashes stable
    // across default instances.
    if (getTagsCount() > 0) {
      hash = (37 * hash) + TAGS_FIELD_NUMBER;
      hash = (53 * hash) + getTagsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response from listing tags.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1beta2.ListTagsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1beta2.ListTagsResponse)
com.google.devtools.artifactregistry.v1beta2.ListTagsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.devtools.artifactregistry.v1beta2.TagProto
.internal_static_google_devtools_artifactregistry_v1beta2_ListTagsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.devtools.artifactregistry.v1beta2.TagProto
.internal_static_google_devtools_artifactregistry_v1beta2_ListTagsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse.class,
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse.Builder.class);
}
// Construct using com.google.devtools.artifactregistry.v1beta2.ListTagsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (tagsBuilder_ == null) {
tags_ = java.util.Collections.emptyList();
} else {
tags_ = null;
tagsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.devtools.artifactregistry.v1beta2.TagProto
.internal_static_google_devtools_artifactregistry_v1beta2_ListTagsResponse_descriptor;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.ListTagsResponse
getDefaultInstanceForType() {
return com.google.devtools.artifactregistry.v1beta2.ListTagsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.ListTagsResponse build() {
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.devtools.artifactregistry.v1beta2.ListTagsResponse buildPartial() {
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse result =
new com.google.devtools.artifactregistry.v1beta2.ListTagsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse result) {
if (tagsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
tags_ = java.util.Collections.unmodifiableList(tags_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.tags_ = tags_;
} else {
result.tags_ = tagsBuilder_.build();
}
}
private void buildPartial0(
com.google.devtools.artifactregistry.v1beta2.ListTagsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
    // Reflection-based mutators below simply delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    /** Dispatches to the typed overload when {@code other} is a ListTagsResponse. */
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.devtools.artifactregistry.v1beta2.ListTagsResponse) {
        return mergeFrom((com.google.devtools.artifactregistry.v1beta2.ListTagsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /** Merges {@code other}'s set fields into this builder (repeated fields are appended). */
    public Builder mergeFrom(com.google.devtools.artifactregistry.v1beta2.ListTagsResponse other) {
      if (other
          == com.google.devtools.artifactregistry.v1beta2.ListTagsResponse.getDefaultInstance())
        return this;
      if (tagsBuilder_ == null) {
        if (!other.tags_.isEmpty()) {
          if (tags_.isEmpty()) {
            // No local entries: share other's (immutable) list instead of copying.
            tags_ = other.tags_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureTagsIsMutable();
            tags_.addAll(other.tags_);
          }
          onChanged();
        }
      } else {
        if (!other.tags_.isEmpty()) {
          if (tagsBuilder_.isEmpty()) {
            // Reset the field builder so it re-wraps the shared list lazily.
            tagsBuilder_.dispose();
            tagsBuilder_ = null;
            tags_ = other.tags_;
            bitField0_ = (bitField0_ & ~0x00000001);
            tagsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getTagsFieldBuilder()
                    : null;
          } else {
            tagsBuilder_.addAllMessages(other.tags_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // ListTagsResponse declares no required fields.
      return true;
    }
    /**
     * Parses wire-format bytes from {@code input} into this builder.
     *
     * <p>Field tags: 10 = tags (message, field 1), 18 = next_page_token (string, field 2);
     * unknown fields are preserved. Partially-read data is kept on parse failure.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.devtools.artifactregistry.v1beta2.Tag m =
                    input.readMessage(
                        com.google.devtools.artifactregistry.v1beta2.Tag.parser(),
                        extensionRegistry);
                if (tagsBuilder_ == null) {
                  ensureTagsIsMutable();
                  tags_.add(m);
                } else {
                  tagsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits: 0x1 = tags_ list is privately owned/mutable, 0x2 = next_page_token set.
    private int bitField0_;
    // Backing list for `tags`; starts as the shared immutable empty list.
    private java.util.List<com.google.devtools.artifactregistry.v1beta2.Tag> tags_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replace a shared list with a private ArrayList before mutating.
    private void ensureTagsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        tags_ = new java.util.ArrayList<com.google.devtools.artifactregistry.v1beta2.Tag>(tags_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily-created nested builder; while non-null it owns the field and tags_ is unused.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.devtools.artifactregistry.v1beta2.Tag,
            com.google.devtools.artifactregistry.v1beta2.Tag.Builder,
            com.google.devtools.artifactregistry.v1beta2.TagOrBuilder>
        tagsBuilder_;
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public java.util.List<com.google.devtools.artifactregistry.v1beta2.Tag> getTagsList() {
if (tagsBuilder_ == null) {
return java.util.Collections.unmodifiableList(tags_);
} else {
return tagsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public int getTagsCount() {
if (tagsBuilder_ == null) {
return tags_.size();
} else {
return tagsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public com.google.devtools.artifactregistry.v1beta2.Tag getTags(int index) {
if (tagsBuilder_ == null) {
return tags_.get(index);
} else {
return tagsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder setTags(int index, com.google.devtools.artifactregistry.v1beta2.Tag value) {
if (tagsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTagsIsMutable();
tags_.set(index, value);
onChanged();
} else {
tagsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder setTags(
int index, com.google.devtools.artifactregistry.v1beta2.Tag.Builder builderForValue) {
if (tagsBuilder_ == null) {
ensureTagsIsMutable();
tags_.set(index, builderForValue.build());
onChanged();
} else {
tagsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder addTags(com.google.devtools.artifactregistry.v1beta2.Tag value) {
if (tagsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTagsIsMutable();
tags_.add(value);
onChanged();
} else {
tagsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder addTags(int index, com.google.devtools.artifactregistry.v1beta2.Tag value) {
if (tagsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTagsIsMutable();
tags_.add(index, value);
onChanged();
} else {
tagsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder addTags(
com.google.devtools.artifactregistry.v1beta2.Tag.Builder builderForValue) {
if (tagsBuilder_ == null) {
ensureTagsIsMutable();
tags_.add(builderForValue.build());
onChanged();
} else {
tagsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder addTags(
int index, com.google.devtools.artifactregistry.v1beta2.Tag.Builder builderForValue) {
if (tagsBuilder_ == null) {
ensureTagsIsMutable();
tags_.add(index, builderForValue.build());
onChanged();
} else {
tagsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder addAllTags(
java.lang.Iterable<? extends com.google.devtools.artifactregistry.v1beta2.Tag> values) {
if (tagsBuilder_ == null) {
ensureTagsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, tags_);
onChanged();
} else {
tagsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder clearTags() {
if (tagsBuilder_ == null) {
tags_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
tagsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public Builder removeTags(int index) {
if (tagsBuilder_ == null) {
ensureTagsIsMutable();
tags_.remove(index);
onChanged();
} else {
tagsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public com.google.devtools.artifactregistry.v1beta2.Tag.Builder getTagsBuilder(int index) {
return getTagsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public com.google.devtools.artifactregistry.v1beta2.TagOrBuilder getTagsOrBuilder(int index) {
if (tagsBuilder_ == null) {
return tags_.get(index);
} else {
return tagsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public java.util.List<? extends com.google.devtools.artifactregistry.v1beta2.TagOrBuilder>
getTagsOrBuilderList() {
if (tagsBuilder_ != null) {
return tagsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(tags_);
}
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public com.google.devtools.artifactregistry.v1beta2.Tag.Builder addTagsBuilder() {
return getTagsFieldBuilder()
.addBuilder(com.google.devtools.artifactregistry.v1beta2.Tag.getDefaultInstance());
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public com.google.devtools.artifactregistry.v1beta2.Tag.Builder addTagsBuilder(int index) {
return getTagsFieldBuilder()
.addBuilder(index, com.google.devtools.artifactregistry.v1beta2.Tag.getDefaultInstance());
}
/**
*
*
* <pre>
* The tags returned.
* </pre>
*
* <code>repeated .google.devtools.artifactregistry.v1beta2.Tag tags = 1;</code>
*/
public java.util.List<com.google.devtools.artifactregistry.v1beta2.Tag.Builder>
getTagsBuilderList() {
return getTagsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.devtools.artifactregistry.v1beta2.Tag,
com.google.devtools.artifactregistry.v1beta2.Tag.Builder,
com.google.devtools.artifactregistry.v1beta2.TagOrBuilder>
getTagsFieldBuilder() {
if (tagsBuilder_ == null) {
tagsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.devtools.artifactregistry.v1beta2.Tag,
com.google.devtools.artifactregistry.v1beta2.Tag.Builder,
com.google.devtools.artifactregistry.v1beta2.TagOrBuilder>(
tags_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
tags_ = null;
}
return tagsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* The token to retrieve the next page of tags, or empty if there are no
* more tags to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The token to retrieve the next page of tags, or empty if there are no
* more tags to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The token to retrieve the next page of tags, or empty if there are no
* more tags to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The token to retrieve the next page of tags, or empty if there are no
* more tags to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The token to retrieve the next page of tags, or empty if there are no
* more tags to return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Unknown-field handling delegates straight to the generated superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1beta2.ListTagsResponse)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1beta2.ListTagsResponse)
  // Singleton default instance shared by all callers; created eagerly at class load.
  private static final com.google.devtools.artifactregistry.v1beta2.ListTagsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1beta2.ListTagsResponse();
  }
  public static com.google.devtools.artifactregistry.v1beta2.ListTagsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parses by delegating to the builder's mergeFrom; partial data is attached to
  // any InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListTagsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListTagsResponse>() {
        @java.lang.Override
        public ListTagsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListTagsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListTagsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.devtools.artifactregistry.v1beta2.ListTagsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,644 | java-artifact-registry/proto-google-cloud-artifact-registry-v1/src/main/java/com/google/devtools/artifactregistry/v1/ListAttachmentsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/artifactregistry/v1/attachment.proto
// Protobuf Java Version: 3.25.8
package com.google.devtools.artifactregistry.v1;
/**
*
*
* <pre>
* The request to list attachments.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListAttachmentsRequest}
*/
public final class ListAttachmentsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.devtools.artifactregistry.v1.ListAttachmentsRequest)
ListAttachmentsRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListAttachmentsRequest.newBuilder() to construct.
  private ListAttachmentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: string fields start as empty strings.
  private ListAttachmentsRequest() {
    parent_ = "";
    filter_ = "";
    pageToken_ = "";
  }
  // Used by the protobuf runtime's reflective instantiation path.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListAttachmentsRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.devtools.artifactregistry.v1.AttachmentProto
        .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.devtools.artifactregistry.v1.AttachmentProto
        .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.devtools.artifactregistry.v1.ListAttachmentsRequest.class,
            com.google.devtools.artifactregistry.v1.ListAttachmentsRequest.Builder.class);
  }
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the parent resource whose attachments will be listed.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the parent resource whose attachments will be listed.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* Optional. An expression for filtering the results of the request. Filter
* rules are case insensitive. The fields eligible for filtering are:
*
* * `target`
* * `type`
* * `attachment_namespace`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. An expression for filtering the results of the request. Filter
* rules are case insensitive. The fields eligible for filtering are:
*
* * `target`
* * `type`
* * `attachment_namespace`
* </pre>
*
* <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 3;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of attachments to return. Maximum page size is 1,000.
* </pre>
*
* <code>int32 page_size = 3;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request, if any.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* The next_page_token value returned from a previous list request, if any.
* </pre>
*
* <code>string page_token = 4;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized tri-state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No proto2-required fields, so always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  /** Serializes set fields in ascending field-number order (proto3 skips defaults). */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  /** Computes (and memoizes) the serialized byte size; mirrors writeTo exactly. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  /** Field-by-field equality, including unknown fields. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.devtools.artifactregistry.v1.ListAttachmentsRequest)) {
      return super.equals(obj);
    }
    com.google.devtools.artifactregistry.v1.ListAttachmentsRequest other =
        (com.google.devtools.artifactregistry.v1.ListAttachmentsRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  /** Memoized hash over descriptor, each field (keyed by field number), and unknown fields. */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Static parse entry points for every supported input type; all delegate to PARSER
  // (directly for in-memory data, via GeneratedMessageV3 helpers for streams).
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a length prefix first (for streams of messages).
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.devtools.artifactregistry.v1.ListAttachmentsRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a useless self-merge when this is the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The request to list attachments.
* </pre>
*
* Protobuf type {@code google.devtools.artifactregistry.v1.ListAttachmentsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.devtools.artifactregistry.v1.ListAttachmentsRequest)
com.google.devtools.artifactregistry.v1.ListAttachmentsRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.devtools.artifactregistry.v1.AttachmentProto
          .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.devtools.artifactregistry.v1.AttachmentProto
          .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.devtools.artifactregistry.v1.ListAttachmentsRequest.class,
              com.google.devtools.artifactregistry.v1.ListAttachmentsRequest.Builder.class);
    }
    // Construct using com.google.devtools.artifactregistry.v1.ListAttachmentsRequest.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    /** Resets all fields and has-bits to their defaults. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      filter_ = "";
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.devtools.artifactregistry.v1.AttachmentProto
          .internal_static_google_devtools_artifactregistry_v1_ListAttachmentsRequest_descriptor;
    }
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.ListAttachmentsRequest
        getDefaultInstanceForType() {
      return com.google.devtools.artifactregistry.v1.ListAttachmentsRequest.getDefaultInstance();
    }
    /** Builds the message, throwing if any required field is unset (this message has none). */
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.ListAttachmentsRequest build() {
      com.google.devtools.artifactregistry.v1.ListAttachmentsRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds the message without checking required fields. */
    @java.lang.Override
    public com.google.devtools.artifactregistry.v1.ListAttachmentsRequest buildPartial() {
      com.google.devtools.artifactregistry.v1.ListAttachmentsRequest result =
          new com.google.devtools.artifactregistry.v1.ListAttachmentsRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
private void buildPartial0(
com.google.devtools.artifactregistry.v1.ListAttachmentsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.filter_ = filter_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.pageToken_ = pageToken_;
}
}
    // Generic field-descriptor overrides: all delegate straight to the protobuf
    // Builder superclass; generated for every message builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to reflective merge.
      if (other instanceof com.google.devtools.artifactregistry.v1.ListAttachmentsRequest) {
        return mergeFrom((com.google.devtools.artifactregistry.v1.ListAttachmentsRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-wise merge from another ListAttachmentsRequest. Per proto3 merge
    // semantics, fields at their default value in 'other' are skipped so they
    // never clobber values already set on this builder.
    public Builder mergeFrom(com.google.devtools.artifactregistry.v1.ListAttachmentsRequest other) {
      if (other
          == com.google.devtools.artifactregistry.v1.ListAttachmentsRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // proto3 message with no required fields: always initialized.
      return true;
    }
    // Wire-format parse loop. Tag values encode (field_number << 3) | wire_type:
    // 10 = parent (1, length-delimited), 18 = filter (2), 24 = page_size (3, varint),
    // 34 = page_token (4). Unknown tags are preserved as unknown fields.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify listeners even on failure so partially-read state is observed.
        onChanged();
      } // finally
      return this;
    }
    // Presence bits for the builder: 0x1=parent, 0x2=filter, 0x4=pageSize, 0x8=pageToken.
    private int bitField0_;
    // Holds either a java.lang.String or a ByteString; decoded lazily and cached.
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource whose attachments will be listed.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // First access after a wire parse: decode the ByteString once and cache it.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource whose attachments will be listed.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        // Cache the UTF-8 encoding so repeated calls don't re-encode.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource whose attachments will be listed.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource whose attachments will be listed.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the parent resource whose attachments will be listed.
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Holds either a java.lang.String or a ByteString; decoded lazily and cached.
    private java.lang.Object filter_ = "";
    /**
     *
     *
     * <pre>
     * Optional. An expression for filtering the results of the request. Filter
     * rules are case insensitive. The fields eligible for filtering are:
     *
     *   * `target`
     *   * `type`
     *   * `attachment_namespace`
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The filter.
     */
    public java.lang.String getFilter() {
      java.lang.Object ref = filter_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the wire-format ByteString once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        filter_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. An expression for filtering the results of the request. Filter
     * rules are case insensitive. The fields eligible for filtering are:
     *
     *   * `target`
     *   * `type`
     *   * `attachment_namespace`
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The bytes for filter.
     */
    public com.google.protobuf.ByteString getFilterBytes() {
      java.lang.Object ref = filter_;
      if (ref instanceof String) {
        // Cache the UTF-8 encoding so repeated calls don't re-encode.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        filter_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. An expression for filtering the results of the request. Filter
     * rules are case insensitive. The fields eligible for filtering are:
     *
     *   * `target`
     *   * `type`
     *   * `attachment_namespace`
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilter(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. An expression for filtering the results of the request. Filter
     * rules are case insensitive. The fields eligible for filtering are:
     *
     *   * `target`
     *   * `type`
     *   * `attachment_namespace`
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearFilter() {
      filter_ = getDefaultInstance().getFilter();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. An expression for filtering the results of the request. Filter
     * rules are case insensitive. The fields eligible for filtering are:
     *
     *   * `target`
     *   * `type`
     *   * `attachment_namespace`
     * </pre>
     *
     * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The bytes for filter to set.
     * @return This builder for chaining.
     */
    public Builder setFilterBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      filter_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Scalar int32 field; presence tracked via bit 0x4 of bitField0_.
    private int pageSize_;
    /**
     *
     *
     * <pre>
     * The maximum number of attachments to return. Maximum page size is 1,000.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of attachments to return. Maximum page size is 1,000.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The maximum number of attachments to return. Maximum page size is 1,000.
     * </pre>
     *
     * <code>int32 page_size = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }
    // Holds either a java.lang.String or a ByteString; decoded lazily and cached.
    private java.lang.Object pageToken_ = "";
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous list request, if any.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return The pageToken.
     */
    public java.lang.String getPageToken() {
      java.lang.Object ref = pageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the wire-format ByteString once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        pageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous list request, if any.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return The bytes for pageToken.
     */
    public com.google.protobuf.ByteString getPageTokenBytes() {
      java.lang.Object ref = pageToken_;
      if (ref instanceof String) {
        // Cache the UTF-8 encoding so repeated calls don't re-encode.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        pageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous list request, if any.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @param value The pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous list request, if any.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageToken() {
      pageToken_ = getDefaultInstance().getPageToken();
      bitField0_ = (bitField0_ & ~0x00000008);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The next_page_token value returned from a previous list request, if any.
     * </pre>
     *
     * <code>string page_token = 4;</code>
     *
     * @param value The bytes for pageToken to set.
     * @return This builder for chaining.
     */
    public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      pageToken_ = value;
      bitField0_ |= 0x00000008;
      onChanged();
      return this;
    }
    // Unknown-field handling delegates to the protobuf Builder superclass.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.devtools.artifactregistry.v1.ListAttachmentsRequest)
}
// @@protoc_insertion_point(class_scope:google.devtools.artifactregistry.v1.ListAttachmentsRequest)
  // Singleton default instance, created eagerly at class-load time.
  private static final com.google.devtools.artifactregistry.v1.ListAttachmentsRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.devtools.artifactregistry.v1.ListAttachmentsRequest();
  }

  public static com.google.devtools.artifactregistry.v1.ListAttachmentsRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser: delegates to the builder's mergeFrom and, on failure, attaches
  // whatever was parsed so far as the unfinished message for diagnostics.
  private static final com.google.protobuf.Parser<ListAttachmentsRequest> PARSER =
      new com.google.protobuf.AbstractParser<ListAttachmentsRequest>() {
        @java.lang.Override
        public ListAttachmentsRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListAttachmentsRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListAttachmentsRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.devtools.artifactregistry.v1.ListAttachmentsRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/jspwiki | 35,484 | jspwiki-main/src/main/java/org/apache/wiki/auth/SecurityVerifier.java | /*
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package org.apache.wiki.auth;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.wiki.api.core.Engine;
import org.apache.wiki.api.core.Session;
import org.apache.wiki.api.exceptions.WikiException;
import org.apache.wiki.auth.authorize.Group;
import org.apache.wiki.auth.authorize.GroupDatabase;
import org.apache.wiki.auth.authorize.GroupManager;
import org.apache.wiki.auth.authorize.Role;
import org.apache.wiki.auth.authorize.WebContainerAuthorizer;
import org.apache.wiki.auth.permissions.AllPermission;
import org.apache.wiki.auth.permissions.GroupPermission;
import org.apache.wiki.auth.permissions.PermissionFactory;
import org.apache.wiki.auth.permissions.WikiPermission;
import org.apache.wiki.auth.user.DummyUserDatabase;
import org.apache.wiki.auth.user.UserDatabase;
import org.apache.wiki.auth.user.UserProfile;
import org.freshcookies.security.policy.PolicyReader;
import javax.security.auth.Subject;
import javax.security.auth.spi.LoginModule;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.AccessControlException;
import java.security.AccessController;
import java.security.KeyStore;
import java.security.Permission;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.security.ProtectionDomain;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
/**
* Helper class for verifying JSPWiki's security configuration. Invoked by <code>admin/SecurityConfig.jsp</code>.
*
* @since 2.4
*/
public final class SecurityVerifier {
    /** The wiki engine whose security configuration is being verified. */
    private final Engine m_engine;

    /** Set by the policy check; exposed via {@link #isSecurityPolicyConfigured()}. */
    private boolean m_isSecurityPolicyConfigured;

    /** Unique Principals parsed from the security policy; empty until the policy is verified. */
    private Principal[] m_policyPrincipals = new Principal[0];

    /** Session (typically an administrator's) that collects the verification messages. */
    private final Session m_session;

    /** Message prefix for errors. */
    public static final String ERROR = "Error.";

    /** Message prefix for warnings. */
    public static final String WARNING = "Warning.";

    /** Message prefix for information messages. */
    public static final String INFO = "Info.";

    /** Message topic for policy errors. */
    public static final String ERROR_POLICY = "Error.Policy";

    /** Message topic for policy warnings. */
    public static final String WARNING_POLICY = "Warning.Policy";

    /** Message topic for policy information messages. */
    public static final String INFO_POLICY = "Info.Policy";

    /** Message topic for JAAS errors. */
    public static final String ERROR_JAAS = "Error.Jaas";

    /** Message topic for JAAS warnings. */
    public static final String WARNING_JAAS = "Warning.Jaas";

    /** Message topic for role-checking errors. */
    public static final String ERROR_ROLES = "Error.Roles";

    /** Message topic for role-checking information messages. */
    public static final String INFO_ROLES = "Info.Roles";

    /** Message topic for user database errors. */
    public static final String ERROR_DB = "Error.UserDatabase";

    /** Message topic for user database warnings. */
    public static final String WARNING_DB = "Warning.UserDatabase";

    /** Message topic for user database information messages. */
    public static final String INFO_DB = "Info.UserDatabase";

    /** Message topic for group database errors. */
    public static final String ERROR_GROUPS = "Error.GroupDatabase";

    /** Message topic for group database warnings. */
    public static final String WARNING_GROUPS = "Warning.GroupDatabase";

    /** Message topic for group database information messages. */
    public static final String INFO_GROUPS = "Info.GroupDatabase";

    /** Message topic for JAAS information messages. */
    public static final String INFO_JAAS = "Info.Jaas";

    /** Human-readable actions tested by the container-role table; parallel to {@link #CONTAINER_JSPS}. */
    private static final String[] CONTAINER_ACTIONS = new String[] { "View pages",
                                                                    "Comment on existing pages",
                                                                    "Edit pages",
                                                                    "Upload attachments",
                                                                    "Create a new group",
                                                                    "Rename an existing page",
                                                                    "Delete pages"
    };

    /** JSPs probed for container security constraints; parallel to {@link #CONTAINER_ACTIONS}. */
    private static final String[] CONTAINER_JSPS = new String[] { "/Wiki.jsp",
                                                                  "/Comment.jsp",
                                                                  "/Edit.jsp",
                                                                  "/Upload.jsp",
                                                                  "/NewGroup.jsp",
                                                                  "/Rename.jsp",
                                                                  "/Delete.jsp"
    };

    /** Cell background used in the HTML report for "allowed" results. */
    private static final String BG_GREEN = "bgcolor=\"#c0ffc0\"";

    /** Cell background used in the HTML report for "denied" results. */
    private static final String BG_RED = "bgcolor=\"#ffc0c0\"";

    private static final Logger LOG = LogManager.getLogger( SecurityVerifier.class.getName() );
    /**
     * Constructs a new SecurityVerifier for a supplied Engine and WikiSession,
     * and immediately runs the full battery of checks (JAAS, security policy,
     * policy/container role agreement, group database, user database). Results
     * are accumulated as messages on the supplied session.
     *
     * @param engine the wiki engine
     * @param session the wiki session (typically, that of an administrator)
     */
    public SecurityVerifier( final Engine engine, final Session session ) {
        m_engine = engine;
        m_session = session;
        // Start from a clean slate so only this run's findings are reported.
        m_session.clearMessages();
        verifyJaas();
        verifyPolicy();
        try {
            verifyPolicyAndContainerRoles();
        } catch( final WikiException e ) {
            m_session.addMessage( ERROR_ROLES, e.getMessage() );
        }
        verifyGroupDatabase();
        verifyUserDatabase();
    }
/**
* Returns an array of unique Principals from the JSPWIki security policy
* file. This array will be zero-length if the policy file was not
* successfully located, or if the file did not specify any Principals in
* the policy.
* @return the array of principals
*/
public Principal[] policyPrincipals()
{
return m_policyPrincipals;
}
    /**
     * Formats and returns an HTML table containing sample permissions and what
     * roles are allowed to have them. This method will throw an
     * {@link IllegalStateException} if the authorizer is not of type
     * {@link org.apache.wiki.auth.authorize.WebContainerAuthorizer}
     * @return the formatted HTML table containing the result of the tests
     */
    public String policyRoleTable()
    {
        final Principal[] roles = m_policyPrincipals;
        final String wiki = m_engine.getApplicationName();

        // Fixed sample pages/groups/actions used to probe the policy grants.
        final String[] pages = new String[]
        { "Main", "Index", "GroupTest", "GroupAdmin" };
        final String[] pageActions = new String[]
        { "view", "edit", "modify", "rename", "delete" };

        final String[] groups = new String[]
        { "Admin", "TestGroup", "Foo" };
        // null entries produce empty "N/A" cells so columns line up with pageActions.
        final String[] groupActions = new String[]
        { "view", "edit", null, null, "delete" };

        final int rolesLength = roles.length;
        final int pageActionsLength = pageActions.length;

        // Calculate column widths: 33% for the permission label, remainder split evenly.
        final String colWidth;
        if( rolesLength > 0 ) {
            colWidth = ( 67f / ( pageActionsLength * rolesLength ) ) + "%";
        } else {
            colWidth = "67%";
        }

        final StringBuilder s = new StringBuilder();

        // Write the table header
        s.append( "<table class=\"wikitable\" border=\"1\">\n" );
        s.append( " <colgroup span=\"1\" width=\"33%\"/>\n" );
        s.append( " <colgroup span=\"" ).append( pageActionsLength * rolesLength ).append( "\" width=\"" ).append( colWidth ).append( "\" align=\"center\"/>\n" );
        s.append( " <tr>\n" );
        s.append( " <th rowspan=\"2\" valign=\"bottom\">Permission</th>\n" );
        for (final Principal principal : roles) {
            s.append(" <th colspan=\"").append(pageActionsLength).append("\" title=\"").append(principal.getClass().getName()).append("\">").append(principal.getName()).append("</th>\n");
        }
        s.append( " </tr>\n" );

        // Print a column for each role: one single-letter sub-header per page action.
        s.append( " <tr>\n" );
        for( int i = 0; i < rolesLength; i++ )
        {
            for( final String pageAction : pageActions )
            {
                final String action = pageAction.substring( 0, 1 );
                s.append( " <th title=\"" ).append( pageAction ).append( "\">" ).append( action ).append( "</th>\n" );
            }
        }
        s.append( " </tr>\n" );

        // Write page permission tests first
        for( final String page : pages ) {
            s.append( " <tr>\n" );
            s.append( " <td>PagePermission \"" ).append( wiki ).append( ":" ).append( page ).append( "\"</td>\n" );
            for( final Principal role : roles ) {
                for( final String pageAction : pageActions ) {
                    final Permission permission = PermissionFactory.getPagePermission( wiki + ":" + page, pageAction );
                    s.append( printPermissionTest( permission, role, 1 ) );
                }
            }
            s.append( " </tr>\n" );
        }

        // Now do the group tests
        for( final String group : groups ) {
            s.append( " <tr>\n" );
            s.append( " <td>GroupPermission \"" ).append( wiki ).append( ":" ).append( group ).append( "\"</td>\n" );
            for( final Principal role : roles ) {
                for( final String groupAction : groupActions ) {
                    Permission permission = null;
                    if( groupAction != null ) {
                        permission = new GroupPermission( wiki + ":" + group, groupAction );
                    }
                    s.append( printPermissionTest( permission, role, 1 ) );
                }
            }
            s.append( " </tr>\n" );
        }

        // Now check the wiki-wide permissions
        final String[] wikiPerms = new String[] { "createGroups", "createPages", "login", "editPreferences", "editProfile" };
        for( final String wikiPerm : wikiPerms ) {
            s.append( " <tr>\n" );
            s.append( " <td>WikiPermission \"" ).append( wiki ).append( "\",\"" ).append( wikiPerm ).append( "\"</td>\n" );
            for( final Principal role : roles ) {
                final Permission permission = new WikiPermission( wiki, wikiPerm );
                // Wiki-wide cells span all pageAction columns for the role.
                s.append( printPermissionTest( permission, role, pageActionsLength ) );
            }
            s.append( " </tr>\n" );
        }

        // Lastly, check for AllPermission
        s.append( " <tr>\n" );
        s.append( " <td>AllPermission \"" ).append( wiki ).append( "\"</td>\n" );
        for( final Principal role : roles )
        {
            final Permission permission = new AllPermission( wiki );
            s.append( printPermissionTest( permission, role, pageActionsLength ) );
        }
        s.append( " </tr>\n" );

        // We're done!
        s.append( "</table>" );
        return s.toString();
    }
/**
* Prints a <td> HTML element with the results of a permission test.
* @param permission the permission to format
* @param principal
* @param cols
*/
private String printPermissionTest( final Permission permission, final Principal principal, final int cols ) {
final StringBuilder s = new StringBuilder();
if( permission == null ) {
s.append( " <td colspan=\"" ).append( cols ).append( "\" align=\"center\" title=\"N/A\">" );
s.append( " </td>\n" );
} else {
final boolean allowed = verifyStaticPermission( principal, permission );
s.append( " <td colspan=\"" ).append( cols ).append( "\" align=\"center\" title=\"" );
s.append( allowed ? "ALLOW: " : "DENY: " );
s.append( permission.getClass().getName() );
s.append( " "" );
s.append( permission.getName() );
s.append( """ );
if ( permission.getName() != null )
{
s.append( ","" );
s.append( permission.getActions() );
s.append( """ );
}
s.append( " " );
s.append( principal.getClass().getName() );
s.append( " "" );
s.append( principal.getName() );
s.append( """ );
s.append( "\"" );
s.append( allowed ? BG_GREEN + ">" : BG_RED + ">" );
s.append( " </td>\n" );
}
return s.toString();
}
/**
* Formats and returns an HTML table containing the roles the web container
* is aware of, and whether each role maps to particular JSPs. This method
* throws an {@link IllegalStateException} if the authorizer is not of type
* {@link org.apache.wiki.auth.authorize.WebContainerAuthorizer}
* @return the formatted HTML table containing the result of the tests
* @throws WikiException if tests fail for unexpected reasons
*/
public String containerRoleTable() throws WikiException {
final AuthorizationManager authorizationManager = m_engine.getManager( AuthorizationManager.class );
final Authorizer authorizer = authorizationManager.getAuthorizer();
// If authorizer not WebContainerAuthorizer, print error message
if ( !( authorizer instanceof final WebContainerAuthorizer wca ) ) {
throw new IllegalStateException( "Authorizer should be WebContainerAuthorizer" );
}
// Now, print a table with JSP pages listed on the left, and
// an evaluation of each pages' constraints for each role
// we discovered
final StringBuilder s = new StringBuilder();
final Principal[] roles = authorizer.getRoles();
s.append( "<table class=\"wikitable\" border=\"1\">\n" );
s.append( "<thead>\n" );
s.append( " <tr>\n" );
s.append( " <th rowspan=\"2\">Action</th>\n" );
s.append( " <th rowspan=\"2\">Page</th>\n" );
s.append( " <th colspan=\"" ).append( roles.length ).append( 1 ).append( "\">Roles</th>\n" );
s.append( " </tr>\n" );
s.append( " <tr>\n" );
s.append( " <th>Anonymous</th>\n" );
for( final Principal role : roles ) {
s.append( " <th>" ).append( role.getName() ).append( "</th>\n" );
}
s.append( "</tr>\n" );
s.append( "</thead>\n" );
s.append( "<tbody>\n" );
for( int i = 0; i < CONTAINER_ACTIONS.length; i++ ) {
final String action = CONTAINER_ACTIONS[i];
final String jsp = CONTAINER_JSPS[i];
// Print whether the page is constrained for each role
final boolean allowsAnonymous = !wca.isConstrained( jsp, Role.ALL );
s.append( " <tr>\n" );
s.append( " <td>" ).append( action ).append( "</td>\n" );
s.append( " <td>" ).append( jsp ).append( "</td>\n" );
s.append( " <td title=\"" );
s.append( allowsAnonymous ? "ALLOW: " : "DENY: " );
s.append( jsp );
s.append( " Anonymous" );
s.append( "\"" );
s.append( allowsAnonymous ? BG_GREEN + ">" : BG_RED + ">" );
s.append( " </td>\n" );
for( final Principal role : roles )
{
final boolean allowed = allowsAnonymous || wca.isConstrained( jsp, (Role)role );
s.append( " <td title=\"" );
s.append( allowed ? "ALLOW: " : "DENY: " );
s.append( jsp );
s.append( " " );
s.append( role.getClass().getName() );
s.append( " "" );
s.append( role.getName() );
s.append( """ );
s.append( "\"" );
s.append( allowed ? BG_GREEN + ">" : BG_RED + ">" );
s.append( " </td>\n" );
}
s.append( " </tr>\n" );
}
s.append( "</tbody>\n" );
s.append( "</table>\n" );
return s.toString();
}
/**
* Returns <code>true</code> if the Java security policy is configured
* correctly, and it verifies as valid.
* @return the result of the configuration check
*/
public boolean isSecurityPolicyConfigured()
{
return m_isSecurityPolicyConfigured;
}
/**
* If the active Authorizer is the WebContainerAuthorizer, returns the roles it knows about; otherwise, a zero-length array.
*
* @return the roles parsed from <code>web.xml</code>, or a zero-length array
* @throws WikiException if the web authorizer cannot obtain the list of roles
*/
public Principal[] webContainerRoles() throws WikiException {
final Authorizer authorizer = m_engine.getManager( AuthorizationManager.class ).getAuthorizer();
if ( authorizer instanceof WebContainerAuthorizer ) {
return authorizer.getRoles();
}
return new Principal[0];
}
    /**
     * Verifies that the roles given in the security policy are reflected by the
     * container <code>web.xml</code> file. Built-in JSPWiki roles are exempt;
     * any other policy role missing from the container is reported as an error.
     * @throws WikiException if the web authorizer cannot verify the roles
     */
    void verifyPolicyAndContainerRoles() throws WikiException {
        final Authorizer authorizer = m_engine.getManager( AuthorizationManager.class ).getAuthorizer();
        final Principal[] containerRoles = authorizer.getRoles();
        boolean missing = false;
        for( final Principal principal : m_policyPrincipals ) {
            if( principal instanceof final Role role ) {
                final boolean isContainerRole = ArrayUtils.contains( containerRoles, role );
                if ( !Role.isBuiltInRole( role ) && !isContainerRole ) {
                    m_session.addMessage( ERROR_ROLES, "Role '" + role.getName() + "' is defined in security policy but not in web.xml." );
                    missing = true;
                }
            }
        }
        if ( !missing ) {
            m_session.addMessage( INFO_ROLES, "Every non-standard role defined in the security policy was also found in web.xml." );
        }
    }
    /**
     * Verifies that the group database was initialized properly, and that
     * group add and delete operations work as they should. Performs a live
     * round-trip: saves a uniquely-named throwaway group, confirms the group
     * count grew, deletes it, and confirms the count returned to its old value.
     */
    void verifyGroupDatabase() {
        final GroupManager mgr = m_engine.getManager( GroupManager.class );
        GroupDatabase db = null;
        try {
            db = m_engine.getManager( GroupManager.class ).getGroupDatabase();
        } catch ( final WikiSecurityException e ) {
            m_session.addMessage( ERROR_GROUPS, "Could not retrieve GroupManager: " + e.getMessage() );
        }

        // Check for obvious error conditions
        if ( mgr == null || db == null ) {
            if ( mgr == null ) {
                m_session.addMessage( ERROR_GROUPS, "GroupManager is null; JSPWiki could not initialize it. Check the error logs." );
            }
            if ( db == null ) {
                m_session.addMessage( ERROR_GROUPS, "GroupDatabase is null; JSPWiki could not initialize it. Check the error logs." );
            }
            return;
        }

        // Everything initialized OK...

        // Tell user what class of database this is.
        m_session.addMessage( INFO_GROUPS, "GroupDatabase is of type '" + db.getClass().getName() + "'. It appears to be initialized properly." );

        // Now, see how many groups we have.
        final int oldGroupCount;
        try {
            final Group[] groups = db.groups();
            oldGroupCount = groups.length;
            m_session.addMessage( INFO_GROUPS, "The group database contains " + oldGroupCount + " groups." );
        } catch( final WikiSecurityException e ) {
            m_session.addMessage( ERROR_GROUPS, "Could not obtain a list of current groups: " + e.getMessage() );
            return;
        }

        // Try adding a bogus group with random name; timestamp suffix avoids
        // colliding with any real group.
        final String name = "TestGroup" + System.currentTimeMillis();
        final Group group;
        try {
            // Create dummy test group
            group = mgr.parseGroup( name, "", true );
            final Principal user = new WikiPrincipal( "TestUser" );
            group.add( user );
            db.save( group, new WikiPrincipal( "SecurityVerifier" ) );

            // Make sure the group saved successfully
            if( db.groups().length == oldGroupCount ) {
                m_session.addMessage( ERROR_GROUPS, "Could not add a test group to the database." );
                return;
            }
            m_session.addMessage( INFO_GROUPS, "The group database allows new groups to be created, as it should." );
        } catch( final WikiSecurityException e ) {
            m_session.addMessage( ERROR_GROUPS, "Could not add a group to the database: " + e.getMessage() );
            return;
        }

        // Now delete the group; should be back to old count
        try {
            db.delete( group );
            if( db.groups().length != oldGroupCount ) {
                m_session.addMessage( ERROR_GROUPS, "Could not delete a test group from the database." );
                return;
            }
            m_session.addMessage( INFO_GROUPS, "The group database allows groups to be deleted, as it should." );
        } catch( final WikiSecurityException e ) {
            m_session.addMessage( ERROR_GROUPS, "Could not delete a test group from the database: " + e.getMessage() );
            return;
        }

        m_session.addMessage( INFO_GROUPS, "The group database configuration looks fine." );
    }
/**
* Verfies the JAAS configuration. The configuration is valid if value of the
* <code>jspwiki.properties<code> property
* {@value org.apache.wiki.auth.AuthenticationManager#PROP_LOGIN_MODULE}
* resolves to a valid class on the classpath.
*/
void verifyJaas() {
// Verify that the specified JAAS moduie corresponds to a class we can load successfully.
final String jaasClass = m_engine.getWikiProperties().getProperty( AuthenticationManager.PROP_LOGIN_MODULE );
if( jaasClass == null || jaasClass.isEmpty() ) {
m_session.addMessage( ERROR_JAAS, "The value of the '" + AuthenticationManager.PROP_LOGIN_MODULE
+ "' property was null or blank. This is a fatal error. This value should be set to a valid LoginModule implementation "
+ "on the classpath." );
return;
}
// See if we can find the LoginModule on the classpath
Class< ? > c = null;
try {
m_session.addMessage( INFO_JAAS,
"The property '" + AuthenticationManager.PROP_LOGIN_MODULE + "' specified the class '" + jaasClass + ".'" );
c = Class.forName( jaasClass );
} catch( final ClassNotFoundException e ) {
m_session.addMessage( ERROR_JAAS, "We could not find the the class '" + jaasClass + "' on the " + "classpath. This is fatal error." );
}
// Is the specified class actually a LoginModule?
if( LoginModule.class.isAssignableFrom( c ) ) {
m_session.addMessage( INFO_JAAS, "We found the the class '" + jaasClass + "' on the classpath, and it is a LoginModule implementation. Good!" );
} else {
m_session.addMessage( ERROR_JAAS, "We found the the class '" + jaasClass + "' on the classpath, but it does not seem to be LoginModule implementation! This is fatal error." );
}
}
/**
* Looks up a file name based on a JRE system property and returns the associated
* File object if it exists. This method adds messages with the topic prefix
* {@link #ERROR} and {@link #INFO} as appropriate, with the suffix matching the
* supplied property.
* @param property the system property to look up
* @return the file object, or <code>null</code> if not found
*/
File getFileFromProperty( final String property )
{
String propertyValue;
try
{
propertyValue = System.getProperty( property );
if ( propertyValue == null )
{
m_session.addMessage( "Error." + property, "The system property '" + property + "' is null." );
return null;
}
//
// It's also possible to use "==" to mark a property. We remove that
// here so that we can actually find the property file, then.
//
if( propertyValue.startsWith("=") )
{
propertyValue = propertyValue.substring(1);
}
try
{
m_session.addMessage( "Info." + property, "The system property '" + property + "' is set to: "
+ propertyValue + "." );
// Prepend a file: prefix if not there already
if ( !propertyValue.startsWith( "file:" ) )
{
propertyValue = "file:" + propertyValue;
}
final URL url = new URL( propertyValue );
final File file = new File( url.getPath() );
if ( file.exists() )
{
m_session.addMessage( "Info." + property, "File '" + propertyValue + "' exists in the filesystem." );
return file;
}
}
catch( final MalformedURLException e )
{
// Swallow exception because we can't find it anyway
}
m_session.addMessage( "Error." + property, "File '" + propertyValue
+ "' doesn't seem to exist. This might be a problem." );
return null;
}
catch( final SecurityException e )
{
m_session.addMessage( "Error." + property, "We could not read system property '" + property
+ "'. This is probably because you are running with a security manager." );
return null;
}
}
/**
* Verfies the Java security policy configuration. The configuration is
* valid if value of the local policy (at <code>WEB-INF/jspwiki.policy</code>
* resolves to an existing file, and the policy file contained therein
* represents a valid policy.
*/
@SuppressWarnings("unchecked")
void verifyPolicy() {
// Look up the policy file and set the status text.
final URL policyURL = m_engine.findConfigFile( AuthorizationManager.DEFAULT_POLICY );
String path = policyURL.getPath();
if ( path.startsWith("file:") ) {
path = path.substring( 5 );
}
final File policyFile = new File( path );
// Next, verify the policy
try {
// Get the file
final PolicyReader policy = new PolicyReader( policyFile );
m_session.addMessage( INFO_POLICY, "The security policy '" + policy.getFile() + "' exists." );
// See if there is a keystore that's valid
final KeyStore ks = policy.getKeyStore();
if ( ks == null ) {
m_session.addMessage( WARNING_POLICY,
"Policy file does not have a keystore... at least not one that we can locate. If your policy file " +
"does not contain any 'signedBy' blocks, this is probably ok." );
} else {
m_session.addMessage( INFO_POLICY,
"The security policy specifies a keystore, and we were able to locate it in the filesystem." );
}
// Verify the file
policy.read();
final List<Exception> errors = policy.getMessages();
if (!errors.isEmpty()) {
for( final Exception e : errors ) {
m_session.addMessage( ERROR_POLICY, e.getMessage() );
}
} else {
m_session.addMessage( INFO_POLICY, "The security policy looks fine." );
m_isSecurityPolicyConfigured = true;
}
// Stash the unique principals mentioned in the file,
// plus our standard roles.
final Set<Principal> principals = new LinkedHashSet<>();
principals.add( Role.ALL );
principals.add( Role.ANONYMOUS );
principals.add( Role.ASSERTED );
principals.add( Role.AUTHENTICATED );
final ProtectionDomain[] domains = policy.getProtectionDomains();
for ( final ProtectionDomain domain : domains ) {
principals.addAll(Arrays.asList(domain.getPrincipals()));
}
m_policyPrincipals = principals.toArray( new Principal[0] );
} catch( final IOException e ) {
m_session.addMessage( ERROR_POLICY, e.getMessage() );
}
}
    /**
     * Verifies that a particular Principal possesses a Permission, as defined
     * in the security policy file.
     * @param principal the principal
     * @param permission the permission
     * @return the result, based on consultation with the active Java security
     * policy
     */
    boolean verifyStaticPermission( final Principal principal, final Permission permission )
    {
        // First, check the global Java security policy: run the permission check
        // as a Subject containing only the supplied principal. The null third
        // argument means no additional AccessControlContext is combined in.
        final Subject subject = new Subject();
        subject.getPrincipals().add( principal );
        final boolean allowedByGlobalPolicy = (Boolean)
            Subject.doAsPrivileged( subject, ( PrivilegedAction< Object > )() -> {
                try {
                    AccessController.checkPermission( permission );
                    return Boolean.TRUE;
                } catch( final AccessControlException e ) {
                    // Denied by the global policy; fall through to the local-policy check.
                    return Boolean.FALSE;
                }
            }, null );
        if ( allowedByGlobalPolicy )
        {
            return true;
        }

        // Check local policy
        final Principal[] principals = new Principal[]{ principal };
        return m_engine.getManager( AuthorizationManager.class ).allowedByLocalPolicy( principals, permission );
    }
/**
* Verifies that the user datbase was initialized properly, and that
* user add and delete operations work as they should.
*/
void verifyUserDatabase() {
final UserDatabase db = m_engine.getManager( UserManager.class ).getUserDatabase();
// Check for obvious error conditions
if ( db == null ) {
m_session.addMessage( ERROR_DB, "UserDatabase is null; JSPWiki could not initialize it. Check the error logs." );
return;
}
if ( db instanceof DummyUserDatabase ) {
m_session.addMessage( ERROR_DB, "UserDatabase is DummyUserDatabase; JSPWiki " +
"may not have been able to initialize the database you supplied in " +
"jspwiki.properties, or you left the 'jspwiki.userdatabase' property " +
"blank. Check the error logs." );
}
// Tell user what class of database this is.
m_session.addMessage( INFO_DB, "UserDatabase is of type '" + db.getClass().getName() +
"'. It appears to be initialized properly." );
// Now, see how many users we have.
final int oldUserCount;
try {
final Principal[] users = db.getWikiNames();
oldUserCount = users.length;
m_session.addMessage( INFO_DB, "The user database contains " + oldUserCount + " users." );
} catch( final WikiSecurityException e ) {
m_session.addMessage( ERROR_DB, "Could not obtain a list of current users: " + e.getMessage() );
return;
}
// Try adding a bogus user with random name
final String loginName = "TestUser" + System.currentTimeMillis();
try {
final UserProfile profile = db.newProfile();
profile.setEmail( "jspwiki.tests@mailinator.com" );
profile.setLoginName( loginName );
profile.setFullname( "FullName" + loginName );
profile.setPassword( "password" );
db.save( profile );
// Make sure the profile saved successfully
if( db.getWikiNames().length == oldUserCount ) {
m_session.addMessage( ERROR_DB, "Could not add a test user to the database." );
return;
}
m_session.addMessage( INFO_DB, "The user database allows new users to be created, as it should." );
} catch( final WikiSecurityException e ) {
m_session.addMessage( ERROR_DB, "Could not add a test user to the database: " + e.getMessage() );
return;
}
// Now delete the profile; should be back to old count
try {
db.deleteByLoginName( loginName );
if( db.getWikiNames().length != oldUserCount ) {
m_session.addMessage( ERROR_DB, "Could not delete a test user from the database." );
return;
}
m_session.addMessage( INFO_DB, "The user database allows users to be deleted, as it should." );
} catch( final WikiSecurityException e ) {
m_session.addMessage( ERROR_DB, "Could not delete a test user to the database: " + e.getMessage() );
return;
}
m_session.addMessage( INFO_DB, "The user database configuration looks fine." );
}
}
|
googleapis/google-cloud-java | 35,734 | java-shopping-merchant-lfp/proto-google-shopping-merchant-lfp-v1/src/main/java/com/google/shopping/merchant/lfp/v1/ListLfpStoresRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/lfp/v1/lfpstore.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.lfp.v1;
/**
*
*
* <pre>
* Request message for the ListLfpStores method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.lfp.v1.ListLfpStoresRequest}
*/
public final class ListLfpStoresRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.lfp.v1.ListLfpStoresRequest)
ListLfpStoresRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ListLfpStoresRequest.newBuilder() to construct.
  private ListLfpStoresRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used for the default instance; string fields default to "".
  private ListLfpStoresRequest() {
    parent_ = "";
    pageToken_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListLfpStoresRequest();
  }

  // Returns the protobuf descriptor for this message type.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.lfp.v1.LfpStoreProto
        .internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.shopping.merchant.lfp.v1.LfpStoreProto
        .internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest.class,
            com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest.Builder.class);
  }
  public static final int PARENT_FIELD_NUMBER = 1;

  // Stored as Object so the field can lazily hold either the decoded String or
  // the wire-format ByteString; decoded values are cached back into the field.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";

  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 bytes once and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. The LFP partner.
   * Format: `accounts/{account}`
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString for future serialization.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  public static final int TARGET_ACCOUNT_FIELD_NUMBER = 2;
  private long targetAccount_ = 0L;

  /**
   *
   *
   * <pre>
   * Required. The Merchant Center id of the merchant to list stores for.
   * </pre>
   *
   * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The targetAccount.
   */
  @java.lang.Override
  public long getTargetAccount() {
    return targetAccount_;
  }

  public static final int PAGE_SIZE_FIELD_NUMBER = 3;
  private int pageSize_ = 0;

  /**
   *
   *
   * <pre>
   * Optional. The maximum number of `LfpStore` resources for the given account
   * to return. The service returns fewer than this value if the number of
   * stores for the given account is less than the `pageSize`. The default value
   * is 250. The maximum value is 1000; If a value higher than the maximum is
   * specified, then the `pageSize` will default to the maximum.
   * </pre>
   *
   * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageSize.
   */
  @java.lang.Override
  public int getPageSize() {
    return pageSize_;
  }

  public static final int PAGE_TOKEN_FIELD_NUMBER = 4;

  // Same lazy String/ByteString caching scheme as parent_ above.
  @SuppressWarnings("serial")
  private volatile java.lang.Object pageToken_ = "";

  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous `ListLfpStoresRequest`
   * call. Provide the page token to retrieve the subsequent page. When
   * paginating, all other parameters provided to `ListLfpStoresRequest` must
   * match the call that provided the page token. The token returned as
   * [nextPageToken][google.shopping.merchant.lfp.v1.ListLfpStoresResponse.next_page_token]
   * in the response to the previous request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The pageToken.
   */
  @java.lang.Override
  public java.lang.String getPageToken() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the UTF-8 bytes once and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      pageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Optional. A page token, received from a previous `ListLfpStoresRequest`
   * call. Provide the page token to retrieve the subsequent page. When
   * paginating, all other parameters provided to `ListLfpStoresRequest` must
   * match the call that provided the page token. The token returned as
   * [nextPageToken][google.shopping.merchant.lfp.v1.ListLfpStoresResponse.next_page_token]
   * in the response to the previous request.
   * </pre>
   *
   * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The bytes for pageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getPageTokenBytes() {
    java.lang.Object ref = pageToken_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString for future serialization.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      pageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // proto3 message with no required fields: always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes the message; fields at their default values are skipped (proto3).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (targetAccount_ != 0L) {
      output.writeInt64(2, targetAccount_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size, mirroring writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (targetAccount_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, targetAccount_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-wise equality, including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest other =
        (com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest) obj;

    if (!getParent().equals(other.getParent())) return false;
    if (getTargetAccount() != other.getTargetAccount()) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash code consistent with equals() above.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + TARGET_ACCOUNT_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getTargetAccount());
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite.
  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Builder factory methods.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for the ListLfpStores method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.lfp.v1.ListLfpStoresRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.lfp.v1.ListLfpStoresRequest)
com.google.shopping.merchant.lfp.v1.ListLfpStoresRequestOrBuilder {
    // Returns the proto descriptor shared with the outer message type.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.lfp.v1.LfpStoreProto
          .internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.shopping.merchant.lfp.v1.LfpStoreProto
          .internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest.class,
              com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest.Builder.class);
    }

    // Construct using com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its proto3 default and clears the presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      targetAccount_ = 0L;
      pageSize_ = 0;
      pageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.lfp.v1.LfpStoreProto
          .internal_static_google_shopping_merchant_lfp_v1_ListLfpStoresRequest_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest getDefaultInstanceForType() {
      return com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest.getDefaultInstance();
    }

    // Builds the message, throwing if it is not fully initialized.
    @java.lang.Override
    public com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest build() {
      com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest buildPartial() {
      com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest result =
          new com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bits are set into the result.
    private void buildPartial0(com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.targetAccount_ = targetAccount_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.pageToken_ = pageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    // Reflection-based field mutators simply delegate to the generated superclass.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dispatches to the typed mergeFrom when the other message is the same type.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest) {
        return mergeFrom((com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges set (non-default) fields of `other` into this builder.
    public Builder mergeFrom(com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest other) {
      if (other == com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getTargetAccount() != 0L) {
        setTargetAccount(other.getTargetAccount());
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Reads fields from the wire: each case label is the field's encoded tag
    // (field number << 3 | wire type); unknown fields are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                targetAccount_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 24:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            case 34:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Presence bits: 0x1 parent, 0x2 targetAccount, 0x4 pageSize, 0x8 pageToken.
    private int bitField0_;

    // Lazily holds either the decoded String or the wire-format ByteString.
    private java.lang.Object parent_ = "";

    /**
     *
     *
     * <pre>
     * Required. The LFP partner.
     * Format: `accounts/{account}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the UTF-8 bytes once and cache the resulting String.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The LFP partner.
     * Format: `accounts/{account}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        // Encode once and cache the ByteString for future serialization.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. The LFP partner.
     * Format: `accounts/{account}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The LFP partner.
     * Format: `accounts/{account}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The LFP partner.
     * Format: `accounts/{account}`
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private long targetAccount_;

    /**
     *
     *
     * <pre>
     * Required. The Merchant Center id of the merchant to list stores for.
     * </pre>
     *
     * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The targetAccount.
     */
    @java.lang.Override
    public long getTargetAccount() {
      return targetAccount_;
    }

    /**
     *
     *
     * <pre>
     * Required. The Merchant Center id of the merchant to list stores for.
     * </pre>
     *
     * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The targetAccount to set.
     * @return This builder for chaining.
     */
    public Builder setTargetAccount(long value) {

      targetAccount_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. The Merchant Center id of the merchant to list stores for.
     * </pre>
     *
     * <code>int64 target_account = 2 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearTargetAccount() {
      bitField0_ = (bitField0_ & ~0x00000002);
      targetAccount_ = 0L;
      onChanged();
      return this;
    }

    private int pageSize_;

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of `LfpStore` resources for the given account
     * to return. The service returns fewer than this value if the number of
     * stores for the given account is less than the `pageSize`. The default value
     * is 250. The maximum value is 1000; If a value higher than the maximum is
     * specified, then the `pageSize` will default to the maximum.
     * </pre>
     *
     * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return The pageSize.
     */
    @java.lang.Override
    public int getPageSize() {
      return pageSize_;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of `LfpStore` resources for the given account
     * to return. The service returns fewer than this value if the number of
     * stores for the given account is less than the `pageSize`. The default value
     * is 250. The maximum value is 1000; If a value higher than the maximum is
     * specified, then the `pageSize` will default to the maximum.
     * </pre>
     *
     * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @param value The pageSize to set.
     * @return This builder for chaining.
     */
    public Builder setPageSize(int value) {

      pageSize_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Optional. The maximum number of `LfpStore` resources for the given account
     * to return. The service returns fewer than this value if the number of
     * stores for the given account is less than the `pageSize`. The default value
     * is 250. The maximum value is 1000; If a value higher than the maximum is
     * specified, then the `pageSize` will default to the maximum.
     * </pre>
     *
     * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearPageSize() {
      bitField0_ = (bitField0_ & ~0x00000004);
      pageSize_ = 0;
      onChanged();
      return this;
    }
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListLfpStoresRequest`
* call. Provide the page token to retrieve the subsequent page. When
* paginating, all other parameters provided to `ListLfpStoresRequest` must
* match the call that provided the page token. The token returned as
* [nextPageToken][google.shopping.merchant.lfp.v1.ListLfpStoresResponse.next_page_token]
* in the response to the previous request.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The pageToken.
*/
// Returns page_token, lazily decoding the wire-format ByteString to a String
// on first access and caching the decoded form in the field.
public java.lang.String getPageToken() {
  java.lang.Object current = pageToken_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  // Field still holds raw bytes: decode once and memoize.
  java.lang.String decoded = ((com.google.protobuf.ByteString) current).toStringUtf8();
  pageToken_ = decoded;
  return decoded;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListLfpStoresRequest`
* call. Provide the page token to retrieve the subsequent page. When
* paginating, all other parameters provided to `ListLfpStoresRequest` must
* match the call that provided the page token. The token returned as
* [nextPageToken][google.shopping.merchant.lfp.v1.ListLfpStoresResponse.next_page_token]
* in the response to the previous request.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for pageToken.
*/
// Returns page_token as a ByteString, lazily encoding a cached String form
// back to UTF-8 bytes and memoizing the result in the field.
public com.google.protobuf.ByteString getPageTokenBytes() {
  java.lang.Object current = pageToken_;
  if (!(current instanceof java.lang.String)) {
    return (com.google.protobuf.ByteString) current;
  }
  com.google.protobuf.ByteString encoded =
      com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) current);
  pageToken_ = encoded;
  return encoded;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListLfpStoresRequest`
* call. Provide the page token to retrieve the subsequent page. When
* paginating, all other parameters provided to `ListLfpStoresRequest` must
* match the call that provided the page token. The token returned as
* [nextPageToken][google.shopping.merchant.lfp.v1.ListLfpStoresResponse.next_page_token]
* in the response to the previous request.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListLfpStoresRequest`
* call. Provide the page token to retrieve the subsequent page. When
* paginating, all other parameters provided to `ListLfpStoresRequest` must
* match the call that provided the page token. The token returned as
* [nextPageToken][google.shopping.merchant.lfp.v1.ListLfpStoresResponse.next_page_token]
* in the response to the previous request.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A page token, received from a previous `ListLfpStoresRequest`
* call. Provide the page token to retrieve the subsequent page. When
* paginating, all other parameters provided to `ListLfpStoresRequest` must
* match the call that provided the page token. The token returned as
* [nextPageToken][google.shopping.merchant.lfp.v1.ListLfpStoresResponse.next_page_token]
* in the response to the previous request.
* </pre>
*
* <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.lfp.v1.ListLfpStoresRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.lfp.v1.ListLfpStoresRequest)
// Eagerly-created singleton default (all-fields-unset) instance shared by all callers.
private static final com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest();
}

// Returns the immutable default instance of this message.
public static com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Singleton parser backing the generated parseFrom/parseDelimitedFrom entry points.
private static final com.google.protobuf.Parser<ListLfpStoresRequest> PARSER =
    new com.google.protobuf.AbstractParser<ListLfpStoresRequest>() {
      @java.lang.Override
      public ListLfpStoresRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect the partial message.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type expected by callers.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static accessor for the message parser.
public static com.google.protobuf.Parser<ListLfpStoresRequest> parser() {
  return PARSER;
}

// Instance accessor required by the MessageLite contract; returns the same singleton parser.
@java.lang.Override
public com.google.protobuf.Parser<ListLfpStoresRequest> getParserForType() {
  return PARSER;
}

// Instance accessor for the shared default instance.
@java.lang.Override
public com.google.shopping.merchant.lfp.v1.ListLfpStoresRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// File: java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/ListReportsResponse.java (googleapis/google-cloud-java)
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/channel/v1/reports_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.channel.v1;
/**
*
*
* <pre>
* Response message for
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports].
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.ListReportsResponse}
*/
@java.lang.Deprecated
public final class ListReportsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.channel.v1.ListReportsResponse)
ListReportsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListReportsResponse.newBuilder() to construct.
private ListReportsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: empty report list and empty page token.
private ListReportsResponse() {
  reports_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

// Reflective instantiation hook used by the protobuf runtime; the parameter
// exists only to distinguish this overload and is never read.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListReportsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.ListReportsResponse.class,
com.google.cloud.channel.v1.ListReportsResponse.Builder.class);
}
public static final int REPORTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.channel.v1.Report> reports_;
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.channel.v1.Report> getReportsList() {
return reports_;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.channel.v1.ReportOrBuilder>
getReportsOrBuilderList() {
return reports_;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
@java.lang.Override
public int getReportsCount() {
return reports_.size();
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
@java.lang.Override
public com.google.cloud.channel.v1.Report getReports(int index) {
return reports_.get(index);
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
@java.lang.Override
public com.google.cloud.channel.v1.ReportOrBuilder getReportsOrBuilder(int index) {
return reports_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Pass this token to
* [FetchReportResultsRequest.page_token][google.cloud.channel.v1.FetchReportResultsRequest.page_token]
* to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
// Returns next_page_token, lazily decoding the stored ByteString form on
// first access and caching the decoded String in the field.
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object current = nextPageToken_;
  if (current instanceof java.lang.String) {
    return (java.lang.String) current;
  }
  java.lang.String decoded = ((com.google.protobuf.ByteString) current).toStringUtf8();
  nextPageToken_ = decoded; // memoize for subsequent calls
  return decoded;
}
/**
*
*
* <pre>
* Pass this token to
* [FetchReportResultsRequest.page_token][google.cloud.channel.v1.FetchReportResultsRequest.page_token]
* to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message in field-number order: repeated reports (tag 1),
// then next_page_token (tag 2, only when non-empty), then unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < reports_.size(); i++) {
    output.writeMessage(1, reports_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
// Computes the serialized byte size, memoizing the result in memoizedSize
// (-1 means "not yet computed"); must mirror writeTo() exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < reports_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, reports_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality: same reports list, same next_page_token, same unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (this == obj) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.channel.v1.ListReportsResponse)) {
    return super.equals(obj);
  }
  final com.google.cloud.channel.v1.ListReportsResponse that =
      (com.google.cloud.channel.v1.ListReportsResponse) obj;
  return getReportsList().equals(that.getReportsList())
      && getNextPageToken().equals(that.getNextPageToken())
      && getUnknownFields().equals(that.getUnknownFields());
}
// Hash consistent with equals(); memoized in memoizedHashCode (0 = not computed).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = hash * 19 + getDescriptor().hashCode();
  if (getReportsCount() > 0) {
    hash = hash * 37 + REPORTS_FIELD_NUMBER;
    hash = hash * 53 + getReportsList().hashCode();
  }
  hash = hash * 37 + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = hash * 53 + getNextPageToken().hashCode();
  hash = hash * 29 + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.ListReportsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.channel.v1.ListReportsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [CloudChannelReportsService.ListReports][google.cloud.channel.v1.CloudChannelReportsService.ListReports].
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.ListReportsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.ListReportsResponse)
com.google.cloud.channel.v1.ListReportsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.ListReportsResponse.class,
com.google.cloud.channel.v1.ListReportsResponse.Builder.class);
}
// Construct using com.google.cloud.channel.v1.ListReportsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (reportsBuilder_ == null) {
reports_ = java.util.Collections.emptyList();
} else {
reports_ = null;
reportsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.channel.v1.ReportsServiceProto
.internal_static_google_cloud_channel_v1_ListReportsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.channel.v1.ListReportsResponse getDefaultInstanceForType() {
return com.google.cloud.channel.v1.ListReportsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.channel.v1.ListReportsResponse build() {
com.google.cloud.channel.v1.ListReportsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds a message snapshot without enforcing required-field checks.
@java.lang.Override
public com.google.cloud.channel.v1.ListReportsResponse buildPartial() {
  com.google.cloud.channel.v1.ListReportsResponse result =
      new com.google.cloud.channel.v1.ListReportsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Transfers the repeated `reports` field; when no field builder is in use the
// backing list is frozen (made unmodifiable) and shared with the message.
private void buildPartialRepeatedFields(
    com.google.cloud.channel.v1.ListReportsResponse result) {
  if (reportsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      reports_ = java.util.Collections.unmodifiableList(reports_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.reports_ = reports_;
  } else {
    result.reports_ = reportsBuilder_.build();
  }
}

// Copies scalar/string fields whose presence bit is set (0x2 = next_page_token).
private void buildPartial0(com.google.cloud.channel.v1.ListReportsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.channel.v1.ListReportsResponse) {
return mergeFrom((com.google.cloud.channel.v1.ListReportsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another ListReportsResponse into this builder: reports are appended,
// next_page_token is overwritten when non-empty, unknown fields are merged.
public Builder mergeFrom(com.google.cloud.channel.v1.ListReportsResponse other) {
  if (other == com.google.cloud.channel.v1.ListReportsResponse.getDefaultInstance())
    return this;
  if (reportsBuilder_ == null) {
    if (!other.reports_.isEmpty()) {
      if (reports_.isEmpty()) {
        // Our list is empty: share the other message's immutable list directly.
        reports_ = other.reports_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureReportsIsMutable();
        reports_.addAll(other.reports_);
      }
      onChanged();
    }
  } else {
    if (!other.reports_.isEmpty()) {
      if (reportsBuilder_.isEmpty()) {
        // Empty field builder: drop it and adopt the other list, re-creating the
        // builder only when the runtime forces field builders to always be used.
        reportsBuilder_.dispose();
        reportsBuilder_ = null;
        reports_ = other.reports_;
        bitField0_ = (bitField0_ & ~0x00000001);
        reportsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getReportsFieldBuilder()
                : null;
      } else {
        reportsBuilder_.addAllMessages(other.reports_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire into this builder. Tag values are
// (field_number << 3) | wire_type: 10 = reports (field 1, length-delimited),
// 18 = next_page_token (field 2, length-delimited), 0 = end of input.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            com.google.cloud.channel.v1.Report m =
                input.readMessage(
                    com.google.cloud.channel.v1.Report.parser(), extensionRegistry);
            if (reportsBuilder_ == null) {
              ensureReportsIsMutable();
              reports_.add(m);
            } else {
              reportsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Unrecognized tag: stash it in unknown fields, or stop cleanly
            // if it closes an enclosing group.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially-merged state is observed.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.channel.v1.Report> reports_ =
java.util.Collections.emptyList();
// Copy-on-write guard: bit 0x1 of bitField0_ tracks whether reports_ is a
// private mutable ArrayList (set) or a shared/immutable list (clear).
private void ensureReportsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    reports_ = new java.util.ArrayList<com.google.cloud.channel.v1.Report>(reports_);
    bitField0_ |= 0x00000001;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.channel.v1.Report,
com.google.cloud.channel.v1.Report.Builder,
com.google.cloud.channel.v1.ReportOrBuilder>
reportsBuilder_;
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public java.util.List<com.google.cloud.channel.v1.Report> getReportsList() {
if (reportsBuilder_ == null) {
return java.util.Collections.unmodifiableList(reports_);
} else {
return reportsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public int getReportsCount() {
if (reportsBuilder_ == null) {
return reports_.size();
} else {
return reportsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public com.google.cloud.channel.v1.Report getReports(int index) {
if (reportsBuilder_ == null) {
return reports_.get(index);
} else {
return reportsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder setReports(int index, com.google.cloud.channel.v1.Report value) {
if (reportsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReportsIsMutable();
reports_.set(index, value);
onChanged();
} else {
reportsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder setReports(
int index, com.google.cloud.channel.v1.Report.Builder builderForValue) {
if (reportsBuilder_ == null) {
ensureReportsIsMutable();
reports_.set(index, builderForValue.build());
onChanged();
} else {
reportsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder addReports(com.google.cloud.channel.v1.Report value) {
if (reportsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReportsIsMutable();
reports_.add(value);
onChanged();
} else {
reportsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder addReports(int index, com.google.cloud.channel.v1.Report value) {
if (reportsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureReportsIsMutable();
reports_.add(index, value);
onChanged();
} else {
reportsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder addReports(com.google.cloud.channel.v1.Report.Builder builderForValue) {
if (reportsBuilder_ == null) {
ensureReportsIsMutable();
reports_.add(builderForValue.build());
onChanged();
} else {
reportsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder addReports(
int index, com.google.cloud.channel.v1.Report.Builder builderForValue) {
if (reportsBuilder_ == null) {
ensureReportsIsMutable();
reports_.add(index, builderForValue.build());
onChanged();
} else {
reportsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder addAllReports(
java.lang.Iterable<? extends com.google.cloud.channel.v1.Report> values) {
if (reportsBuilder_ == null) {
ensureReportsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, reports_);
onChanged();
} else {
reportsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder clearReports() {
if (reportsBuilder_ == null) {
reports_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
reportsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public Builder removeReports(int index) {
if (reportsBuilder_ == null) {
ensureReportsIsMutable();
reports_.remove(index);
onChanged();
} else {
reportsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public com.google.cloud.channel.v1.Report.Builder getReportsBuilder(int index) {
return getReportsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public com.google.cloud.channel.v1.ReportOrBuilder getReportsOrBuilder(int index) {
if (reportsBuilder_ == null) {
return reports_.get(index);
} else {
return reportsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public java.util.List<? extends com.google.cloud.channel.v1.ReportOrBuilder>
getReportsOrBuilderList() {
if (reportsBuilder_ != null) {
return reportsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(reports_);
}
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public com.google.cloud.channel.v1.Report.Builder addReportsBuilder() {
return getReportsFieldBuilder()
.addBuilder(com.google.cloud.channel.v1.Report.getDefaultInstance());
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
public com.google.cloud.channel.v1.Report.Builder addReportsBuilder(int index) {
return getReportsFieldBuilder()
.addBuilder(index, com.google.cloud.channel.v1.Report.getDefaultInstance());
}
/**
*
*
* <pre>
* The reports available to the partner.
* </pre>
*
* <code>repeated .google.cloud.channel.v1.Report reports = 1;</code>
*/
    public java.util.List<com.google.cloud.channel.v1.Report.Builder> getReportsBuilderList() {
      // Forces creation of the field builder; mutations to the returned
      // builders are reflected in this message builder.
      return getReportsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.channel.v1.Report,
            com.google.cloud.channel.v1.Report.Builder,
            com.google.cloud.channel.v1.ReportOrBuilder>
        getReportsFieldBuilder() {
      // Lazily creates the repeated-field builder on first use, transferring
      // ownership of the current list to it. The low bit of bitField0_ tells
      // the builder whether that list is already mutable. reports_ is nulled
      // afterwards so all subsequent access goes through the builder.
      if (reportsBuilder_ == null) {
        reportsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.channel.v1.Report,
                com.google.cloud.channel.v1.Report.Builder,
                com.google.cloud.channel.v1.ReportOrBuilder>(
                reports_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        reports_ = null;
      }
      return reportsBuilder_;
    }
    // Backing storage for next_page_token (field 2). Holds either a String or
    // a ByteString; the accessors lazily convert and cache the requested form.
    private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Pass this token to
* [FetchReportResultsRequest.page_token][google.cloud.channel.v1.FetchReportResultsRequest.page_token]
* to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Field still holds the wire-format ByteString: decode it once and
        // cache the String so later calls are cheap.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
*
*
* <pre>
* Pass this token to
* [FetchReportResultsRequest.page_token][google.cloud.channel.v1.FetchReportResultsRequest.page_token]
* to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        // Field currently holds a String: encode it once and cache the
        // ByteString form for subsequent calls.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
/**
*
*
* <pre>
* Pass this token to
* [FetchReportResultsRequest.page_token][google.cloud.channel.v1.FetchReportResultsRequest.page_token]
* to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Pass this token to
* [FetchReportResultsRequest.page_token][google.cloud.channel.v1.FetchReportResultsRequest.page_token]
* to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Pass this token to
* [FetchReportResultsRequest.page_token][google.cloud.channel.v1.FetchReportResultsRequest.page_token]
* to retrieve the next page of results.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject malformed input eagerly,
      // before any state is mutated.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Final override: unknown-field handling is fixed by the generated base class.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Final override: merging of unknown fields is delegated to the base class.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.ListReportsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.channel.v1.ListReportsResponse)
  // Shared singleton holding this message type with every field at its
  // default value; returned by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.cloud.channel.v1.ListReportsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.channel.v1.ListReportsResponse();
  }
  public static com.google.cloud.channel.v1.ListReportsResponse getDefaultInstance() {
    // Canonical all-defaults instance, created once in the static initializer.
    return DEFAULT_INSTANCE;
  }
  // Stateless, thread-safe singleton parser for ListReportsResponse.
  private static final com.google.protobuf.Parser<ListReportsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListReportsResponse>() {
        @java.lang.Override
        public ListReportsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap raw I/O failures in the protobuf exception type declared here.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListReportsResponse> parser() {
    // Static accessor for the shared parser instance.
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListReportsResponse> getParserForType() {
    // Instance accessor required by the Message interface; same singleton.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.channel.v1.ListReportsResponse getDefaultInstanceForType() {
    // Instance accessor required by the Message interface; same singleton.
    return DEFAULT_INSTANCE;
  }
}
|
apache/juneau | 33,273 | juneau-utest/src/test/java/org/apache/juneau/cp/FileFinder_Test.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.cp;
import static java.util.Locale.*;
import static org.apache.juneau.TestUtils.*;
import static org.apache.juneau.common.utils.StringUtils.*;
import static org.junit.jupiter.api.Assertions.*;
import java.nio.file.*;
import java.util.*;
import org.apache.juneau.*;
import org.apache.juneau.cp.sub.*;
import org.junit.jupiter.api.*;
public class FileFinder_Test extends TestBase { // NOSONAR - Needs to be public.
private String read(FileFinder ff, String path) throws Exception {
return toUtf8(ff.getStream(path, null).orElse(null));
}
private String read(FileFinder ff, String path, Locale locale) throws Exception {
return toUtf8(ff.getStream(path, locale).orElse(null));
}
//-----------------------------------------------------------------------------------------------------------------
// Basic tests.
//-----------------------------------------------------------------------------------------------------------------
@Test void a01_empty() throws Exception {
var x = FileFinder
.create()
.build();
assertNull(read(x,"files/test1a"));
}
//-----------------------------------------------------------------------------------------------------------------
// File system tests.
//-----------------------------------------------------------------------------------------------------------------
	/** File-system lookups rooted at ".": nested dirs resolve; traversal, encoded dots, and class/properties files miss. */
	@Test void b01_fileSystem_rootDir() throws Exception {
		var x = FileFinder
			.create()
			.dir(".")
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[home:/files/test1/"+p+"]", read(x,"files/test1/"+p));
			assertContains("[home:/files/test1/dir/"+p+"]", read(x,"files/test1/dir/"+p));
			assertContains("[home:/files/test1/dir/dir/"+p+"]", read(x,"files/test1/dir/dir/"+p));
			// Surrounding whitespace and slashes are normalized away by the finder.
			assertContains("[home:/files/test1/"+p+"]", read(x," / files/test1/"+p+" / "));
			assertContains("[home:/files/test1/dir/"+p+"]", read(x," / files/test1/dir/"+p+" / "));
			assertContains("[home:/files/test1/dir/dir/"+p+"]", read(x," / files/test1/dir/dir/"+p+" / "));
		}
		// Missing files, ".." traversal (raw and percent-encoded), dot segments,
		// and blocked extensions must all fail to resolve.
		var badPatterns = a("files/test1/bad.txt","files/test1/../test1/_a.txt","files/test1/%2E%2E/test1/_a.txt","files/bad.txt",null,"",".","..","%2E","%2E%2E","j.class","k.properties");
		for (var p : badPatterns) {
			assertNull(read(x,p));
		}
	}
	/** File-system lookups rooted at "files": paths resolve relative to the sub-directory. */
	@Test void b02_fileSystem_subDir() throws Exception {
		var x = FileFinder
			.create()
			.dir("files")
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[home:/files/test1/"+p+"]", read(x,"test1/"+p));
			assertContains("[home:/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p));
			assertContains("[home:/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p));
			// Leading/trailing slashes are stripped before resolution.
			assertContains("[home:/files/test1/"+p+"]", read(x,"/test1/"+p+"/"));
			assertContains("[home:/files/test1/dir/"+p+"]", read(x,"/test1/dir/"+p+"/"));
			assertContains("[home:/files/test1/dir/dir/"+p+"]", read(x,"/test1/dir/dir/"+p+"/"));
		}
		// Unknown files, traversal attempts, and bare dot segments must not resolve.
		var badPatterns = a("test1/bad.txt","test1/../test1/_a.txt","bad.txt",null,"",".","..");
		for (var p : badPatterns) {
			assertNull(read(x,p));
		}
	}
	/**
	 * Flat localization on the file system: for locale ja / ja_JP the finder
	 * prefers "name_ja.ext" / "name_ja_JP.ext" variants in the same directory.
	 */
	@Test void b03_fileSystem_localized_flat() throws Exception {
		var x = FileFinder
			.create()
			.dir("files")
			.build();
		// Index i pairs a base file name with its ja / ja_JP localized variants.
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		var patterns_ja = a("_a_ja.txt","_b_ja",".c",".d_ja.txt","e_ja.txt","f_ja","g_foo_ja.txt","h.foo_ja.txt","i_foo_ja");
		var patterns_ja_JP = a("_a_ja_JP.txt","_b_ja_JP",".c",".d_ja_JP.txt","e_ja_JP.txt","f_ja_JP","g_foo_ja_JP.txt","h.foo_ja_JP.txt","i_foo_ja_JP");
		for (var i = 0; i < patterns.length; i++) {
			var p = patterns[i];
			var p_ja = patterns_ja[i];
			var p_ja_JP = patterns_ja_JP[i];
			assertContains("[home:/files/test1/"+p+"]", read(x,"test1/"+p,null));
			assertContains("[home:/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p,null));
			assertContains("[home:/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p,null));
			assertContains("[home:/files/test1/"+p+"]", read(x,"/test1/"+p+"/",null));
			assertContains("[home:/files/test1/dir/"+p+"]", read(x,"/test1/dir/"+p+"/",null));
			assertContains("[home:/files/test1/dir/dir/"+p+"]", read(x,"/test1/dir/dir/"+p+"/",null));
			assertContains("[home:/files/test1/"+p_ja+"]", read(x,"test1/"+p,JAPANESE));
			assertContains("[home:/files/test1/dir/"+p_ja+"]", read(x,"test1/dir/"+p,JAPANESE));
			assertContains("[home:/files/test1/dir/dir/"+p_ja+"]", read(x,"test1/dir/dir/"+p,JAPANESE));
			assertContains("[home:/files/test1/"+p_ja+"]", read(x,"/test1/"+p+"/",JAPANESE));
			assertContains("[home:/files/test1/dir/"+p_ja+"]", read(x,"/test1/dir/"+p+"/",JAPANESE));
			assertContains("[home:/files/test1/dir/dir/"+p_ja+"]", read(x,"/test1/dir/dir/"+p+"/",JAPANESE));
			assertContains("[home:/files/test1/"+p_ja_JP+"]", read(x,"test1/"+p,JAPAN));
			assertContains("[home:/files/test1/dir/"+p_ja_JP+"]", read(x,"test1/dir/"+p,JAPAN));
			assertContains("[home:/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"test1/dir/dir/"+p,JAPAN));
			assertContains("[home:/files/test1/"+p_ja_JP+"]", read(x,"/test1/"+p+"/",JAPAN));
			assertContains("[home:/files/test1/dir/"+p_ja_JP+"]", read(x,"/test1/dir/"+p+"/",JAPAN));
			assertContains("[home:/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"/test1/dir/dir/"+p+"/",JAPAN));
		}
		// Bad paths miss regardless of the requested locale.
		var badPatterns = a("test1/bad.txt","test1/../test1/_a.txt","bad.txt",null,"",".","..","j.class","k.properties");
		for (var p : badPatterns) {
			assertNull(read(x,p,null));
			assertNull(read(x,p,JAPANESE));
			assertNull(read(x,p,JAPAN));
		}
	}
	/**
	 * Hierarchical localization on the file system: locale ja / ja_JP maps to
	 * the "ja/" and "ja/JP/" sub-directories rather than file-name suffixes.
	 */
	@Test void b04_fileSystem_localized_hierarchical() throws Exception {
		var x = FileFinder
			.create()
			.dir("files/test2")
			.build();
		assertContains("[home:/files/test2/a.txt]", read(x,"a.txt", null));
		assertContains("[home:/files/test2/ja/a.txt]", read(x,"a.txt", JAPANESE));
		assertContains("[home:/files/test2/ja/JP/a.txt]", read(x,"a.txt", JAPAN));
		assertContains("[home:/files/test2/a.txt]", read(x,"/a.txt/", null));
		assertContains("[home:/files/test2/ja/a.txt]", read(x,"/a.txt/", JAPANESE));
		assertContains("[home:/files/test2/ja/JP/a.txt]", read(x,"/a.txt/", JAPAN));
		assertContains("[home:/files/test2/dir/a.txt]", read(x,"dir/a.txt", null));
		assertContains("[home:/files/test2/ja/dir/a.txt]", read(x,"dir/a.txt", JAPANESE));
		assertContains("[home:/files/test2/ja/JP/dir/a.txt]", read(x,"dir/a.txt", JAPAN));
		assertContains("[home:/files/test2/dir/a.txt]", read(x,"/dir/a.txt/", null));
		assertContains("[home:/files/test2/ja/dir/a.txt]", read(x,"/dir/a.txt/", JAPANESE));
		assertContains("[home:/files/test2/ja/JP/dir/a.txt]", read(x,"/dir/a.txt/", JAPAN));
		assertContains("[home:/files/test2/dir/dir/a.txt]", read(x,"dir/dir/a.txt", null));
		assertContains("[home:/files/test2/ja/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPANESE));
		assertContains("[home:/files/test2/ja/JP/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPAN));
		assertContains("[home:/files/test2/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", null));
		assertContains("[home:/files/test2/ja/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPANESE));
		assertContains("[home:/files/test2/ja/JP/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPAN));
	}
//-----------------------------------------------------------------------------------------------------------------
// Classpath tests - Classpath root
//-----------------------------------------------------------------------------------------------------------------
	/** Classpath lookups rooted at "/": resources resolve by absolute classpath path; traversal and class files miss. */
	@Test void c01_classpathRoot_rootDir() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "/", false)
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[cp:/files/test1/"+p+"]", read(x,"files/test1/"+p));
			assertContains("[cp:/files/test1/dir/"+p+"]", read(x,"files/test1/dir/"+p));
			assertContains("[cp:/files/test1/dir/dir/"+p+"]", read(x,"files/test1/dir/dir/"+p));
			// Surrounding whitespace and slashes are normalized away.
			assertContains("[cp:/files/test1/"+p+"]", read(x," / files/test1/"+p+" / "));
			assertContains("[cp:/files/test1/dir/"+p+"]", read(x," / files/test1/dir/"+p+" / "));
			assertContains("[cp:/files/test1/dir/dir/"+p+"]", read(x," / files/test1/dir/dir/"+p+" / "));
		}
		// Missing resources, traversal, dot segments, and .class files must miss.
		var badPatterns = a("files/test1/bad.txt","files/test1/../test1/_a.txt","files/bad.txt",null,"",".","..","LocalizedFileStore_Test.class");
		for (var p : badPatterns) {
			assertNull(read(x,p));
		}
	}
	/** Classpath lookups rooted at the absolute "/files" package: paths resolve relative to it. */
	@Test void c02_classpathRoot_subdir() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "/files", false)
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[cp:/files/test1/"+p+"]", read(x,"test1/"+p));
			assertContains("[cp:/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p));
			assertContains("[cp:/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p));
			// Surrounding whitespace and slashes are normalized away.
			assertContains("[cp:/files/test1/"+p+"]", read(x," / test1/"+p+" / "));
			assertContains("[cp:/files/test1/dir/"+p+"]", read(x," / test1/dir/"+p+" / "));
			assertContains("[cp:/files/test1/dir/dir/"+p+"]", read(x," / test1/dir/dir/"+p+" / "));
		}
		// Unknown resources, traversal attempts, and dot segments must not resolve.
		var badPatterns = a("files/test1/bad.txt","files/test1/../test1/_a.txt","files/bad.txt",null,"",".","..");
		for (var p : badPatterns) {
			assertNull(read(x,p));
		}
	}
	/**
	 * Flat localization for absolute classpath roots: locale ja / ja_JP selects
	 * "name_ja.ext" / "name_ja_JP.ext" variants in the same package.
	 */
	@Test void c03_classpathRoot_localized_flat() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "/files", false)
			.build();
		// Index i pairs a base resource name with its ja / ja_JP localized variants.
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		var patterns_ja = a("_a_ja.txt","_b_ja",".c",".d_ja.txt","e_ja.txt","f_ja","g_foo_ja.txt","h.foo_ja.txt","i_foo_ja");
		var patterns_ja_JP = a("_a_ja_JP.txt","_b_ja_JP",".c",".d_ja_JP.txt","e_ja_JP.txt","f_ja_JP","g_foo_ja_JP.txt","h.foo_ja_JP.txt","i_foo_ja_JP");
		for (var i = 0; i < patterns.length; i++) {
			var p = patterns[i];
			var p_ja = patterns_ja[i];
			var p_ja_JP = patterns_ja_JP[i];
			assertContains("[cp:/files/test1/"+p+"]", read(x,"test1/"+p,null));
			assertContains("[cp:/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p,null));
			assertContains("[cp:/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p,null));
			assertContains("[cp:/files/test1/"+p+"]", read(x,"/test1/"+p+"/",null));
			assertContains("[cp:/files/test1/dir/"+p+"]", read(x,"/test1/dir/"+p+"/",null));
			assertContains("[cp:/files/test1/dir/dir/"+p+"]", read(x,"/test1/dir/dir/"+p+"/",null));
			assertContains("[cp:/files/test1/"+p_ja+"]", read(x,"test1/"+p,JAPANESE));
			assertContains("[cp:/files/test1/dir/"+p_ja+"]", read(x,"test1/dir/"+p,JAPANESE));
			assertContains("[cp:/files/test1/dir/dir/"+p_ja+"]", read(x,"test1/dir/dir/"+p,JAPANESE));
			assertContains("[cp:/files/test1/"+p_ja+"]", read(x,"/test1/"+p+"/",JAPANESE));
			assertContains("[cp:/files/test1/dir/"+p_ja+"]", read(x,"/test1/dir/"+p+"/",JAPANESE));
			assertContains("[cp:/files/test1/dir/dir/"+p_ja+"]", read(x,"/test1/dir/dir/"+p+"/",JAPANESE));
			assertContains("[cp:/files/test1/"+p_ja_JP+"]", read(x,"test1/"+p,JAPAN));
			assertContains("[cp:/files/test1/dir/"+p_ja_JP+"]", read(x,"test1/dir/"+p,JAPAN));
			assertContains("[cp:/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"test1/dir/dir/"+p,JAPAN));
			assertContains("[cp:/files/test1/"+p_ja_JP+"]", read(x,"/test1/"+p+"/",JAPAN));
			assertContains("[cp:/files/test1/dir/"+p_ja_JP+"]", read(x,"/test1/dir/"+p+"/",JAPAN));
			assertContains("[cp:/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"/test1/dir/dir/"+p+"/",JAPAN));
		}
		// Bad paths miss regardless of the requested locale.
		var badPatterns = a("test1/bad.txt","test1/../test1/_a.txt","bad.txt",null,"",".","..");
		for (var p : badPatterns) {
			assertNull(read(x,p,null));
			assertNull(read(x,p,JAPANESE));
			assertNull(read(x,p,JAPAN));
		}
	}
	/**
	 * Hierarchical localization for absolute classpath roots: locale ja / ja_JP
	 * maps to the "ja/" and "ja/JP/" sub-packages.
	 */
	@Test void c04_classpathRoot_localized_hierarchical() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "/files/test2", false)
			.build();
		assertContains("[cp:/files/test2/a.txt]", read(x,"a.txt", null));
		assertContains("[cp:/files/test2/ja/a.txt]", read(x,"a.txt", JAPANESE));
		assertContains("[cp:/files/test2/ja/JP/a.txt]", read(x,"a.txt", JAPAN));
		assertContains("[cp:/files/test2/a.txt]", read(x,"/a.txt/", null));
		assertContains("[cp:/files/test2/ja/a.txt]", read(x,"/a.txt/", JAPANESE));
		assertContains("[cp:/files/test2/ja/JP/a.txt]", read(x,"/a.txt/", JAPAN));
		assertContains("[cp:/files/test2/dir/a.txt]", read(x,"dir/a.txt", null));
		assertContains("[cp:/files/test2/ja/dir/a.txt]", read(x,"dir/a.txt", JAPANESE));
		assertContains("[cp:/files/test2/ja/JP/dir/a.txt]", read(x,"dir/a.txt", JAPAN));
		assertContains("[cp:/files/test2/dir/a.txt]", read(x,"/dir/a.txt/", null));
		assertContains("[cp:/files/test2/ja/dir/a.txt]", read(x,"/dir/a.txt/", JAPANESE));
		assertContains("[cp:/files/test2/ja/JP/dir/a.txt]", read(x,"/dir/a.txt/", JAPAN));
		assertContains("[cp:/files/test2/dir/dir/a.txt]", read(x,"dir/dir/a.txt", null));
		assertContains("[cp:/files/test2/ja/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPANESE));
		assertContains("[cp:/files/test2/ja/JP/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPAN));
		assertContains("[cp:/files/test2/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", null));
		assertContains("[cp:/files/test2/ja/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPANESE));
		assertContains("[cp:/files/test2/ja/JP/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPAN));
	}
//-----------------------------------------------------------------------------------------------------------------
// Classpath tests - Classpath relative
//-----------------------------------------------------------------------------------------------------------------
	/** Classpath lookups relative to this test's package ("" root, non-recursive). */
	@Test void d01a_classpathRelative_rootDir() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "", false)
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"files/test1/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"files/test1/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"files/test1/dir/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x," / files/test1/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x," / files/test1/dir/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x," / files/test1/dir/dir/"+p+" / "));
		}
		// A resource directly in the package root also resolves.
		assertContains("[cp:/org/apache/juneau/cp/_a.txt]", read(x,"_a.txt"));
		// Unknown resources, traversal attempts, and dot segments must not resolve.
		var badPatterns = a("files/test1/bad.txt","files/test1/../test1/_a.txt","files/bad.txt",null,"",".","..");
		for (var p : badPatterns) {
			assertNull(read(x,p));
		}
	}
	/** Same as d01a but recursive=true and anchored on a subclass in a sub-package: the parent package is searched too. */
	@Test void d01b_classpathRelative_rootDir_recursive() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test2.class, "", true)
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"files/test1/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"files/test1/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"files/test1/dir/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x," / files/test1/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x," / files/test1/dir/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x," / files/test1/dir/dir/"+p+" / "));
		}
	}
	/** Classpath lookups relative to the package-local "files" sub-path (non-recursive). */
	@Test void d02a_classpathRelative_subdir() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "files", false)
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"test1/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x," / test1/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x," / test1/dir/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x," / test1/dir/dir/"+p+" / "));
		}
		// Unknown resources, traversal attempts, and dot segments must not resolve.
		var badPatterns = a("files/test1/bad.txt","files/test1/../test1/_a.txt","files/bad.txt",null,"",".","..");
		for (var p : badPatterns) {
			assertNull(read(x,p));
		}
	}
	/** Same as d02a but recursive=true from a subclass in a sub-package. */
	@Test void d02b_classpathRelative_subdir_recursive() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test2.class, "files", true)
			.build();
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		for (var p : patterns) {
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"test1/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x," / test1/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x," / test1/dir/"+p+" / "));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x," / test1/dir/dir/"+p+" / "));
		}
	}
	/**
	 * Flat localization for package-relative classpath roots: locale ja / ja_JP
	 * selects "name_ja.ext" / "name_ja_JP.ext" variants.
	 */
	@Test void d03a_classpathRelative_localized_flat() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "files", false)
			.build();
		// Index i pairs a base resource name with its ja / ja_JP localized variants.
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		var patterns_ja = a("_a_ja.txt","_b_ja",".c",".d_ja.txt","e_ja.txt","f_ja","g_foo_ja.txt","h.foo_ja.txt","i_foo_ja");
		var patterns_ja_JP = a("_a_ja_JP.txt","_b_ja_JP",".c",".d_ja_JP.txt","e_ja_JP.txt","f_ja_JP","g_foo_ja_JP.txt","h.foo_ja_JP.txt","i_foo_ja_JP");
		for (var i = 0; i < patterns.length; i++) {
			var p = patterns[i];
			var p_ja = patterns_ja[i];
			var p_ja_JP = patterns_ja_JP[i];
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"test1/"+p,null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p,null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p,null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"/test1/"+p+"/",null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"/test1/dir/"+p+"/",null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"/test1/dir/dir/"+p+"/",null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja+"]", read(x,"test1/"+p,JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja+"]", read(x,"test1/dir/"+p,JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja+"]", read(x,"test1/dir/dir/"+p,JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja+"]", read(x,"/test1/"+p+"/",JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja+"]", read(x,"/test1/dir/"+p+"/",JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja+"]", read(x,"/test1/dir/dir/"+p+"/",JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja_JP+"]", read(x,"test1/"+p,JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja_JP+"]", read(x,"test1/dir/"+p,JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"test1/dir/dir/"+p,JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja_JP+"]", read(x,"/test1/"+p+"/",JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja_JP+"]", read(x,"/test1/dir/"+p+"/",JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"/test1/dir/dir/"+p+"/",JAPAN));
		}
		// Bad paths miss regardless of the requested locale.
		var badPatterns = a("test1/bad.txt","test1/../test1/_a.txt","bad.txt",null,"",".","..");
		for (var p : badPatterns) {
			assertNull(read(x,p,null));
			assertNull(read(x,p,JAPANESE));
			assertNull(read(x,p,JAPAN));
		}
	}
	/** Same as d03a but recursive=true from a subclass in a sub-package. */
	@Test void d03b_classpathRelative_localized_flat_recursive() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test2.class, "files", true)
			.build();
		// Index i pairs a base resource name with its ja / ja_JP localized variants.
		var patterns = a("_a.txt","_b",".c",".d.txt","e.txt","f","g_foo.txt","h.foo.txt","i_foo");
		var patterns_ja = a("_a_ja.txt","_b_ja",".c",".d_ja.txt","e_ja.txt","f_ja","g_foo_ja.txt","h.foo_ja.txt","i_foo_ja");
		var patterns_ja_JP = a("_a_ja_JP.txt","_b_ja_JP",".c",".d_ja_JP.txt","e_ja_JP.txt","f_ja_JP","g_foo_ja_JP.txt","h.foo_ja_JP.txt","i_foo_ja_JP");
		for (var i = 0; i < patterns.length; i++) {
			var p = patterns[i];
			var p_ja = patterns_ja[i];
			var p_ja_JP = patterns_ja_JP[i];
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"test1/"+p,null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"test1/dir/"+p,null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"test1/dir/dir/"+p,null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p+"]", read(x,"/test1/"+p+"/",null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p+"]", read(x,"/test1/dir/"+p+"/",null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p+"]", read(x,"/test1/dir/dir/"+p+"/",null));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja+"]", read(x,"test1/"+p,JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja+"]", read(x,"test1/dir/"+p,JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja+"]", read(x,"test1/dir/dir/"+p,JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja+"]", read(x,"/test1/"+p+"/",JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja+"]", read(x,"/test1/dir/"+p+"/",JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja+"]", read(x,"/test1/dir/dir/"+p+"/",JAPANESE));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja_JP+"]", read(x,"test1/"+p,JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja_JP+"]", read(x,"test1/dir/"+p,JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"test1/dir/dir/"+p,JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/"+p_ja_JP+"]", read(x,"/test1/"+p+"/",JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/"+p_ja_JP+"]", read(x,"/test1/dir/"+p+"/",JAPAN));
			assertContains("[cp:/org/apache/juneau/cp/files/test1/dir/dir/"+p_ja_JP+"]", read(x,"/test1/dir/dir/"+p+"/",JAPAN));
		}
	}
	/**
	 * Hierarchical localization for package-relative classpath roots: locale
	 * ja / ja_JP maps to the "ja/" and "ja/JP/" sub-packages.
	 */
	@Test void d04a_classpathRelative_localized_hierarchical() throws Exception {
		var x = FileFinder
			.create()
			.cp(FileFinder_Test.class, "files/test2", false)
			.build();
		assertContains("[cp:/org/apache/juneau/cp/files/test2/a.txt]", read(x,"a.txt", null));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/a.txt]", read(x,"a.txt", JAPANESE));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/a.txt]", read(x,"a.txt", JAPAN));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/a.txt]", read(x,"/a.txt/", null));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/a.txt]", read(x,"/a.txt/", JAPANESE));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/a.txt]", read(x,"/a.txt/", JAPAN));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/a.txt]", read(x,"dir/a.txt", null));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/a.txt]", read(x,"dir/a.txt", JAPANESE));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/a.txt]", read(x,"dir/a.txt", JAPAN));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/a.txt]", read(x,"/dir/a.txt/", null));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/a.txt]", read(x,"/dir/a.txt/", JAPANESE));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/a.txt]", read(x,"/dir/a.txt/", JAPAN));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/dir/a.txt]", read(x,"dir/dir/a.txt", null));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPANESE));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPAN));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", null));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPANESE));
		assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPAN));
	}
@Test void d04b_classpathRelative_localized_hierarchical_recursive() throws Exception {
var x = FileFinder
.create()
.cp(FileFinder_Test2.class, "files/test2", true)
.build();
assertContains("[cp:/org/apache/juneau/cp/files/test2/a.txt]", read(x,"a.txt", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/a.txt]", read(x,"a.txt", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/a.txt]", read(x,"a.txt", JAPAN));
assertContains("[cp:/org/apache/juneau/cp/files/test2/a.txt]", read(x,"/a.txt/", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/a.txt]", read(x,"/a.txt/", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/a.txt]", read(x,"/a.txt/", JAPAN));
assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/a.txt]", read(x,"dir/a.txt", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/a.txt]", read(x,"dir/a.txt", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/a.txt]", read(x,"dir/a.txt", JAPAN));
assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/a.txt]", read(x,"/dir/a.txt/", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/a.txt]", read(x,"/dir/a.txt/", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/a.txt]", read(x,"/dir/a.txt/", JAPAN));
assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/dir/a.txt]", read(x,"dir/dir/a.txt", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPAN));
assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/dir/a.txt]", read(x,"/dir/dir/a.txt/", JAPAN));
FileFinder
.create()
.cp(FileFinder_Test2.class, "files/test2", true)
.exclude("(?i).*\\.(class|properties)")
.build();
}
@Test void d05_classpathRelative_ignorePattern() throws Exception {
var x = FileFinder
.create()
.cp(FileFinder_Test2.class, "files/test2", true)
.exclude("(?i).*\\.(txt)")
.build();
assertNull(read(x,"a.txt", null));
assertNull(read(x,"a.txt", JAPANESE));
assertNull(read(x,"a.txt", JAPAN));
assertNull(read(x,"dir/a.txt", null));
assertNull(read(x,"dir/a.txt", JAPANESE));
assertNull(read(x,"dir/a.txt", JAPAN));
assertNull(read(x,"dir/dir/a.txt", null));
assertNull(read(x,"dir/dir/a.txt", JAPANESE));
assertNull(read(x,"dir/dir/a.txt", JAPAN));
x = FileFinder
.create()
.cp(FileFinder_Test2.class, "files/test2", true)
.exclude("(?i).*\\.(TXT)")
.build();
assertNull(read(x,"a.txt", null));
assertNull(read(x,"a.txt", JAPANESE));
assertNull(read(x,"a.txt", JAPAN));
assertNull(read(x,"dir/a.txt", null));
assertNull(read(x,"dir/a.txt", JAPANESE));
assertNull(read(x,"dir/a.txt", JAPAN));
assertNull(read(x,"dir/dir/a.txt", null));
assertNull(read(x,"dir/dir/a.txt", JAPANESE));
assertNull(read(x,"dir/dir/a.txt", JAPAN));
x = FileFinder
.create()
.cp(FileFinder_Test2.class, "files/test2", true)
.exclude()
.build();
assertContains("[cp:/org/apache/juneau/cp/files/test2/a.txt]", read(x,"a.txt", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/a.txt]", read(x,"a.txt", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/a.txt]", read(x,"a.txt", JAPAN));
assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/a.txt]", read(x,"dir/a.txt", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/a.txt]", read(x,"dir/a.txt", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/a.txt]", read(x,"dir/a.txt", JAPAN));
assertContains("[cp:/org/apache/juneau/cp/files/test2/dir/dir/a.txt]", read(x,"dir/dir/a.txt", null));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPANESE));
assertContains("[cp:/org/apache/juneau/cp/files/test2/ja/JP/dir/dir/a.txt]", read(x,"dir/dir/a.txt", JAPAN));
}
//-----------------------------------------------------------------------------------------------------------------
// Other tests
//-----------------------------------------------------------------------------------------------------------------
@Test void e01_localDir_hashCode() {
var s = set(new LocalDir(Paths.get("test")), new LocalDir(Paths.get("test")));
assertSize(1, s);
s = set(new LocalDir(Paths.get("test")), new LocalDir(Paths.get("test","test")));
assertSize(2, s);
s = set(new LocalDir(List.class,null), new LocalDir(List.class,null));
assertSize(1, s);
s = set(new LocalDir(List.class,null), new LocalDir(List.class,"foo"));
assertSize(2, s);
s = set(new LocalDir(List.class,null), new LocalDir(String.class,null));
assertSize(2, s);
}
@Test void e02_caching() throws Exception {
var x = FileFinder
.create()
.dir(".")
.caching(100_000_000)
.build();
assertContains("[home:/files/test1/_a.txt]", read(x,"files/test1/_a.txt"));
assertContains("[home:/files/test1/_a.txt]", read(x,"files/test1/_a.txt"));
x = FileFinder
.create()
.dir(".")
.caching(1)
.build();
assertContains("[home:/files/test1/_a.txt]", read(x,"files/test1/_a.txt"));
assertContains("[home:/files/test1/_a.txt]", read(x,"files/test1/_a.txt"));
x = FileFinder
.create()
.cp(FileFinder_Test.class, "/", false)
.caching(100_000_000)
.build();
assertContains("[cp:/files/test1/_a.txt]", read(x,"files/test1/_a.txt"));
assertContains("[cp:/files/test1/_a.txt]", read(x,"files/test1/_a.txt"));
}
@Test void e03_subclassing() {
var x = E03b
.create()
.dir(".")
.caching(100_000_000)
.type(E03b.class)
.build();
assertInstanceOf(E03b.class, x);
}
	/** Builder subclass used by {@link #e03_subclassing()} to verify custom builders work. */
	public static class E03a extends FileFinder.Builder {
		// Delegates to the default bean store; no additional builder state is needed for the test.
		protected E03a() {
			super(BeanStore.INSTANCE);
		}
	}
	/** Finder subclass constructed through the {@link E03a} builder in {@link #e03_subclassing()}. */
	public static class E03b extends BasicFileFinder {
		// Static factory returning the custom builder type so the fluent chain stays typed as E03a.
		public static E03a create() {
			return new E03a();
		}
		public E03b(E03a x) {
			super(x);
		}
	}
} |
apache/qpid | 35,472 | qpid/tools/src/java/qpid-qmf2/src/main/java/org/apache/qpid/qmf2/util/ConnectionHelper.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.qpid.qmf2.util;
// JMS Imports
import javax.jms.ConnectionFactory;
import javax.jms.Connection;
import javax.jms.JMSException;
// JNDI Imports
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
// Simple Logging Facade 4 Java
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
// Misc Imports
import java.util.Map;
import java.util.Properties;
// Reuse this class as it provides a handy mechanism to parse an options String into a Map
import org.apache.qpid.messaging.util.AddressParser;
/**
* The Qpid M4 Java and C++ clients and the Python QMF tools all use different URL formats.
* This class provides helper methods to support a variety of URL formats and connection options
* in order to provide flexibility when creating connections.
* <p>
* Much of the following information is taken from <a href="https://cwiki.apache.org/qpid/url-format-proposal.html">
* New URL format for AMQP + Qpid</a>
* <p>
* <h3>AMQP 0-10 format</h3>
* C++ uses the AMQP 0-10 format: section 9.1.2 as follows:
* <pre>
* amqp_url = "amqp:" prot_addr_list
* prot_addr_list = [prot_addr ","]* prot_addr
* prot_addr = tcp_prot_addr | tls_prot_addr
*
* tcp_prot_addr = tcp_id tcp_addr
* tcp_id = "tcp:" | ""
* tcp_addr = [host [":" port] ]
* host = <as per <a href="http://www.ietf.org/rfc/rfc3986.txt">rfc3986</a>>
* port = number
* </pre>
* The AMQP 0-10 format only provides protocol address information for a (list of) brokers.
* <p>
* <p>
*
* <h3>Python tool BrokerURL format</h3>
* The Python tools bundled with Qpid such as qpid-config use a "BrokerURL" format with the following Address syntax:
* <pre>
* [<user>/<pass>@]<hostname> | <ip-address>[:<port>]
* </pre>
*
* <h3>Qpid M4 Java Connection URL format</h3>
* The Qpid M4 Java format provides additional options for connection options (user, password, vhost etc.)
* Documentation for this format may be found here: <a href="https://cwiki.apache.org/qpid/connection-url-format.html">
* Qpid M4 Java Connection URL Format</a>
* <p>
* Java ConnectionURLs look like this:
* <pre>
* amqp://[<user>:<pass>@][<clientid>]/<virtualhost>[?<option>='<value>'[&<option>='<value>']]
* </pre>
* This syntax is very powerful, but it can also be fairly complex to work with, especially when one realises
* that one of the options in the above syntax is brokerlist='<broker url>' where broker url is itself a URL
* of the format:
* <pre>
* <transport>://<host>[:<port>][?<option>='<value>'[&<option>='<value>']]
* </pre>
* so one may see ConnectionURLs that look like:
* <pre>
* {@literal amqp://guest:guest@clientid/test?brokerlist='tcp://localhost:5672?retries='10'&connectdelay='1000''}
* </pre>
*
* <p>
* <p>
* <h3>Extended AMQP 0-10 URL format</h3>
* There is a proposal to extend the AMQP 0-10 URL syntax to include user:pass@ style authentication
* information, virtual host and extensible name/value options. It also makes the implied extension points of
* the original grammar more explicit.
* <pre>
* amqp_url = "amqp://" [ userinfo "@" ] addr_list [ vhost ]
* addr_list = addr *( "," addr )
* addr = prot_addr [ options ]
* prot_addr = tcp_prot_addr | other_prot_addr
* vhost = "/" *pchar [ options ]
*
* tcp_prot_addr = tcp_id tcp_addr
* tcp_id = "tcp:" / "" ; tcp is the default
* tcp_addr = [ host [ ":" port ] ]
*
* other_prot_addr = other_prot_id ":" *pchar
* other_prot_id = scheme
*
* options = "?" option *( ";" option )
* option = name "=" value
* name = *pchar
* value = *pchar
* </pre>
*
* <h3>Incompatibility with AMQP 0-10 format</h3>
* This syntax is backward compatible with AMQP 0-10 with one exception: AMQP 0-10 did not have an initial
* // after amqp: The justification was that that the // form is only used for URIs with hierarchical structure
* <p>
* However it's been pointed out that in fact the URL does already specify a 1-level hierarchy of address / vhost.
* In the future the hierarchy could be extended to address objects within a vhost such as queues, exchanges etc.
* So this proposal adopts amqp:// syntax.
* <p>
* It's easy to write a backward-compatible parser by relaxing the grammar as follows:
* <pre>
* amqp_url = "amqp:" [ "//" ] [ userinfo "@" ] addr_list [ vhost ]
* </pre>
*
* <h3>Differences from Qpid M4 Java Connection URL format</h3>
* Addresses are at the start of the URL rather than in the "brokerlist" option.
* <p>
* Option format is {@literal ?foo=bar;x=y } rather than {@literal ?foo='bar'&x='y'}. The use of "'" quotes is not common for URI query
* strings. The use of "&" as a separator creates problems
* <p>
* user, pass and clientid are options rather than having a special place at the front of the URL. clientid is
* a Qpid proprietary property and user/pass are not relevant in all authentication schemes.
* <p>
* Qpid M4 Java URLs requires the brokerlist option, so this is an easy way to detect a Qpid M4 URL vs. an
* Extended AMQP 0-10 URL and parse accordingly.
*
* <h3>Options</h3>
* Some of the URL forms are fairly limited in terms of options, so it is useful to be able to pass options as
* an additional string, though it's important to note that if multiple brokers are supplied in the AMQP 0.10 format
* the same options will be applied to all brokers.
* <p>
* The option format is the same as that of the C++ qpid::messaging Connection class. for example: "{reconnect: true,
* tcp-nodelay: true}":
* <p>
* <table summary="Connection Options" width="100%" border="1"><thead>
* <tr><th>option name</th><th>value type</th><th>semantics</th></tr></thead><tbody>
* <tr>
* <td><code class="literal">maxprefetch</code></td>
* <td>integer</td>
* <td>The maximum number of pre-fetched messages per destination.</td>
* </tr>
* <tr>
* <td><code class="literal">sync_publish</code></td>
* <td>{'persistent' | 'all'}</td>
* <td>A sync command is sent after every persistent message to guarantee that it has been received; if the
* value is 'persistent', this is done only for persistent messages.</td>
* </tr>
* <tr>
* <td><code class="literal">sync_ack</code></td>
* <td>boolean</td>
* <td>A sync command is sent after every acknowledgement to guarantee that it has been received.</td>
* </tr>
* <tr>
* <td><code class="literal">use_legacy_map_msg_format</code></td>
* <td>boolean</td>
* <td>If you are using JMS Map messages and deploying a new client with any JMS client older than 0.8 release,
* you must set this to true to ensure the older clients can understand the map message encoding.</td>
* </tr>
* <tr>
* <td><code class="literal">failover</code></td>
* <td>{'roundrobin' | 'singlebroker' | 'nofailover' | 'failover_exchange'}</td>
* <td>If roundrobin is selected it will try each broker given in the broker list. If failover_exchange is
* selected it connects to the initial broker given in the broker URL and will receive membership updates
* via the failover exchange. </td>
* </tr>
* <tr>
* <td><code class="literal">cyclecount</code></td>
* <td>integer</td>
* <td>For roundrobin failover cyclecount is the number of times to loop through the list of available brokers
* before failure.</td>
* </tr>
* <tr>
* <td><code class="literal">username</code></td>
* <td>string</td>
* <td>The username to use when authenticating to the broker.</td>
* </tr>
* <tr>
* <td><code class="literal">password</code></td>
* <td>string</td>
* <td>The password to use when authenticating to the broker.</td>
* </tr>
* <tr>
* <td><code class="literal">sasl_mechanisms</code></td>
* <td>string</td>
* <td>The specific SASL mechanisms to use when authenticating to the broker. The value is a space separated list.</td>
* </tr>
* <tr>
* <td><code class="literal">sasl_mechs</code></td>
* <td>string</td>
 * <td>The specific SASL mechanisms to use when authenticating to the broker. The value is a space
 * separated list. This is simply a synonym for sasl_mechanisms above.</td>
* </tr>
* <tr>
* <td><code class="literal">sasl_encryption</code></td>
* <td>boolean</td>
* <td>If <code class="literal">sasl_encryption='true'</code>, the JMS client attempts to negotiate a security
* layer with the broker using GSSAPI to encrypt the connection. Note that for this to happen, GSSAPI must
* be selected as the sasl_mech.</td>
* </tr>
* <tr>
* <td><code class="literal">ssl</code></td>
* <td>boolean</td>
* <td>If <code class="literal">ssl='true'</code>, the JMS client will encrypt the connection using SSL.</td>
* </tr>
* <tr>
* <td><code class="literal">reconnect</code></td>
* <td>boolean</td>
* <td>Transparently reconnect if the connection is lost.</td>
* </tr>
* <tr>
* <td><code class="literal">reconnect_timeout</code></td>
* <td>integer</td>
* <td>Total number of seconds to continue reconnection attempts before giving up and raising an exception.</td>
* </tr>
* <tr>
* <td><code class="literal">reconnect_limit</code></td>
* <td>integer</td>
* <td>Maximum number of reconnection attempts before giving up and raising an exception.</td>
* </tr>
* <tr>
* <td><code class="literal">reconnect_interval_min</code></td>
* <td>integer representing time in seconds</td>
* <td> Minimum number of seconds between reconnection attempts. The first reconnection attempt is made
* immediately; if that fails, the first reconnection delay is set to the value of <code class="literal">
* reconnect_interval_min</code>; if that attempt fails, the reconnect interval increases exponentially
* until a reconnection attempt succeeds or <code class="literal">reconnect_interval_max</code> is reached.</td>
* </tr>
* <tr>
* <td><code class="literal">reconnect_interval_max</code></td>
* <td>integer representing time in seconds</td>
* <td>Maximum reconnect interval.</td>
* </tr>
* <tr>
* <td><code class="literal">reconnect_interval</code></td>
* <td>integer representing time in seconds</td>
* <td>Sets both <code class="literal">reconnection_interval_min</code> and <code class="literal">
* reconnection_interval_max</code> to the same value. The default value is 5 seconds</td>
* </tr>
* <tr>
* <td><code class="literal">heartbeat</code></td>
* <td>integer representing time in seconds</td>
* <td>Requests that heartbeats be sent every N seconds. If two successive heartbeats are missed the connection is
* considered to be lost.</td>
* </tr>
* <tr>
* <td><code class="literal">protocol</code></td>
* <td>string</td>
* <td>Sets the underlying protocol used. The default option is 'tcp'. To enable ssl, set to 'ssl'. The C++ client
* additionally supports 'rdma'. </td>
* </tr>
* <tr>
* <td><code class="literal">tcp-nodelay</code></td>
* <td>boolean</td>
* <td>Set tcp no-delay, i.e. disable Nagle algorithm.</td>
* </tr>
* <tr>
* <td><code class="literal">sasl_protocol</code></td>
* <td>string</td>
* <td>Used only for Kerberos. <code class="literal">sasl_protocol</code> must be set to the principal for the
* qpidd broker, e.g. <code class="literal">qpidd/</code></td>
* </tr>
* <tr>
* <td><code class="literal">sasl_server</code></td>
* <td>string</td>
* <td>For Kerberos, sasl_mechs must be set to GSSAPI, <code class="literal">sasl_server</code> must be set to
* the host for the SASL server, e.g. <code class="literal">sasl.com.</code></td>
* </tr>
* <tr>
* <td><code class="literal">trust_store</code></td>
* <td>string</td>
* <td>path to Keberos trust store</td>
* </tr>
* <tr>
* <td><code class="literal">trust_store_password</code></td>
* <td>string</td>
* <td>Kerberos trust store password</td>
* </tr>
* <tr>
* <td><code class="literal">key_store</code></td>
* <td>string</td>
* <td>path to Kerberos key store </td>
* </tr>
* <tr>
* <td><code class="literal">key_store_password</code></td>
* <td>string</td>
* <td>Kerberos key store password</td>
* </tr>
* <tr>
* <td><code class="literal">ssl_cert_alias</code></td>
* <td>string</td>
* <td>If multiple certificates are present in the keystore, the alias will be used to extract the correct
* certificate.</td>
* </tr>
* </tbody></table>
*
* <h3>Other features of this class</h3>
* Whilst it is generally the norm to use JNDI to specify Connections, Destinations etc. it is also often quite useful
* to specify Connections programmatically, for example when writing a tool one may wish to specify the broker via the
* command line to enable the tool to connect to different broker instances.
* To facilitate this this class provides a basic createConnection() method that takes a URL and returns a JMS
* Connection.
*
* @author Fraser Adams
*/
public final class ConnectionHelper
{
private static final Logger _log = LoggerFactory.getLogger(ConnectionHelper.class);
/**
* Make constructor private as the class comprises only static helper methods.
*/
private ConnectionHelper()
{
}
/**
* Create a ConnectionURL from the proposed Extended AMQP 0.10 URL format. This is experimental and may or may
* not work. Options are assumed to be the same as the Java Connection URL, which will probably not be the case
* if this URL form is ultimately adopted. For example the example URLs have "amqp://host1,host2?retry=2,host3"
* whereas the Java Connection URL uses &retries=2
*
* I'm not overly keen on this code it looks pretty inelegant and I'm slightly embarrassed by it, but it
* is really just an experiment.
*
* @param url the input URL.
* @param username the username.
* @param password the password.
* @return a String containing the Java Connection URL.
*/
private static String parseExtendedAMQPURL(String url, String username, String password)
{
String vhost = ""; // Specifying an empty vhost uses default Virtual Host.
String urlOptions = "";
String brokerList = "";
url = url.substring(7); // Chop off "amqp://"
String[] split = url.split("@"); // First break out the userinfo if present
String remainder = split[0];
if (split.length == 2)
{ // Extract username and password from the userinfo field
String[] userinfo = split[0].split(":");
remainder = split[1];
username = userinfo[0];
if (userinfo.length == 2)
{
password = userinfo[1];
}
}
// Replace foo=baz with foo='baz'. There's probably a more elegant way to do this using a fancy
// regex, but unfortunately I'm not terribly good at regexes so this is the brute force approach :-(
// OTOH it's probably more readable and obvious than a regex to do the same thing would be.
split = remainder.split("=");
StringBuilder buf = new StringBuilder(split[0]);
for (int i = 1; i < split.length; i++)
{
String substring = "='" + split[i];
if (substring.contains(";"))
{
substring = substring.replaceFirst(";", "'&"); // Note we also replace the option separator here
}
else if (substring.contains("/"))
{
substring = substring.replaceFirst("/", "'/");
}
else if (substring.contains(","))
{
substring = substring.replaceFirst(",", "',");
}
else
{
substring = substring + "'";
}
buf.append(substring);
}
remainder = buf.toString();
// Now split into addrList and vhost parts (see Javadoc for the grammar of this URL format)
split = remainder.split("/"); // vhost starts with a mandatory '/' character
String[] addrSplit = split[0].split(","); // prot_addrs are comma separated
boolean firstBroker = true;
buf = new StringBuilder();
for (String broker : addrSplit)
{ // Iterate through the address list creating brokerList style URLs
broker = broker.trim();
String protocol = "tcp"; // set default protocol
String[] components = broker.split(":");
// Note protocols other than tcp and vm are not supported by the Connection URL so the results
// are pretty much undefined if other protocols are passed on the input URL.
if (components.length == 1)
{ // Assume protocol = tcp and broker = hostname
}
else if (components.length == 2)
{ // Probably host:port but could be some other protocol in and Extended AMQP 0.10 URL
try
{ // Somewhat ugly, but effective test to check if the second component is an integer
Integer.parseInt(components[1]);
// If the above succeeds the components are likely host:port
broker = components[0] + ":" + components[1];
}
catch (NumberFormatException nfe)
{ // If the second component isn't an integer then it's likely a wacky protocol...
protocol = components[0];
broker = components[1];
}
}
else if (components.length == 3)
{
protocol = components[0];
broker = components[1] + ":" + components[2];
}
if (firstBroker)
{
buf.append(protocol + "://" + broker);
}
else
{ // https://cwiki.apache.org/qpid/connection-url-format.html says "A minimum of one broker url is
// required additional URLs are semi-colon(';') delimited."
buf.append(";" + protocol + "://" + broker);
}
firstBroker = false;
}
brokerList = "'" + buf.toString() + "'";
if (split.length == 2)
{ // Extract the vhost and any connection level options
vhost = split[1];
String[] split2 = vhost.split("\\?"); // Look for options
vhost = split2[0];
if (split2.length == 2)
{
urlOptions = "&" + split2[1];
}
}
String connectionURL = "amqp://" + username + ":" + password + "@QpidJMS/" + vhost + "?brokerlist=" +
brokerList + urlOptions;
return connectionURL;
}
/**
* If no explicit username is supplied then explicitly set sasl mechanism to ANONYMOUS. If this isn't done
* The default is PLAIN which causes the broker to fail with "warning Failed to retrieve sasl username".
*
* @param username the previously extracted username.
* @param brokerListOptions the brokerList options extracted so far.
* @return the brokerList options adjusted with sasl_mechs='ANONYMOUS' if no username has been supplied.
*/
private static String adjustBrokerListOptions(final String username, final String brokerListOptions)
{
if (username.equals(""))
{
if (brokerListOptions.equals(""))
{
return "?sasl_mechs='ANONYMOUS'";
}
else
{
if (brokerListOptions.contains("sasl_mechs"))
{
return brokerListOptions;
}
else
{
return brokerListOptions + "&sasl_mechs='ANONYMOUS'";
}
}
}
else
{
return brokerListOptions;
}
}
/**
* Create a ConnectionURL from the input "generic" URL.
*
* @param url the input URL.
* @param username the username.
* @param password the password.
* @param urlOptions the pre-parsed set of connection level options.
* @param brokerListOptions the pre-parsed set of specific brokerList options.
* @return a String containing the Java Connection URL.
*/
private static String parseURL(String url, String username, String password,
String urlOptions, String brokerListOptions)
{
if (url.startsWith("amqp://"))
{ // Somewhat experimental. This new format is only a "proposed" format
return parseExtendedAMQPURL(url, username, password);
}
String vhost = ""; // Specifying an empty vhost uses default Virtual Host.
String brokerList = "";
if (url.startsWith("amqp:"))
{ // AMQP 0.10 URL format
url = url.substring(5); // Chop off "amqp:"
String[] addrSplit = url.split(","); // prot_addrs are comma separated
boolean firstBroker = true;
brokerListOptions = adjustBrokerListOptions(username, brokerListOptions);
StringBuilder buf = new StringBuilder();
for (String broker : addrSplit)
{ // Iterate through the address list creating brokerList style URLs
broker = broker.trim();
if (broker.startsWith("tcp:"))
{ // Only tcp is supported in an AMQP 0.10 prot_addr so we *should* only have to account for
// a "tcp:" prefix when normalising broker URLs
broker = broker.substring(4); // Chop off "tcp:"
}
if (firstBroker)
{
buf.append("tcp://" + broker + brokerListOptions);
}
else
{ // https://cwiki.apache.org/qpid/connection-url-format.html says "A minimum of one broker url is
// required additional URLs are semi-colon(';') delimited."
buf.append(";tcp://" + broker + brokerListOptions);
}
firstBroker = false;
}
brokerList = "'" + buf.toString() + "'";
}
else if (url.contains("@"))
{ // BrokerURL format as used in the Python tools.
String[] split = url.split("@");
url = split[1];
split = split[0].split("[/:]"); // Accept both <username>/<password> and <username>:<password>
username = split[0];
if (split.length == 2)
{
password = split[1];
}
brokerListOptions = adjustBrokerListOptions(username, brokerListOptions);
brokerList = "'tcp://" + url + brokerListOptions + "'";
}
else
{ // Basic host:port format
brokerListOptions = adjustBrokerListOptions(username, brokerListOptions);
brokerList = "'tcp://" + url + brokerListOptions + "'";
}
String connectionURL = "amqp://" + username + ":" + password + "@QpidJMS/" + vhost + "?brokerlist=" +
brokerList + urlOptions;
return connectionURL;
}
/**
* Creates a Java Connection URL from one of the other supported URL formats.
*
* @param url an AMQP 0.10 URL, an extended AMQP 0-10 URL, a Broker URL or a Connection URL (the latter is simply
* returned untouched).
* @return a String containing the Java Connection URL.
*/
public static String createConnectionURL(String url)
{
return createConnectionURL(url, null);
}
/**
* Creates a Java Connection URL from one of the other supported URL formats plus options.
*
* @param url an AMQP 0.10 URL, an extended AMQP 0-10 URL, a Broker URL or a Connection URL (the latter is simply
* returned untouched).
* @param opts a String containing the options encoded using the same form as the C++ qpid::messaging
* Connection class.
* @return a String containing the Java Connection URL.
*/
public static String createConnectionURL(String url, String opts)
{
// This method is actually mostly about parsing the options, when the options are extracted it delegates
// to parseURL() to do the actual URL parsing.
// If a Java Connection URL has been passed in we simply return it.
if (url.startsWith("amqp://") && url.contains("brokerlist"))
{
return url;
}
// Initialise options to default values
String username = "";
String password = "";
String urlOptions = "";
String brokerListOptions = "";
// Get options from option String
if (opts != null && opts.startsWith("{") && opts.endsWith("}"))
{
// Connection URL Options
String maxprefetch = "";
String sync_publish = "";
String sync_ack = "";
String use_legacy_map_msg_format = "";
String failover = "";
// Broker List Options
String heartbeat = "";
String retries = "";
String connectdelay = "";
String connecttimeout = "";
String tcp_nodelay = "";
String sasl_mechs = "";
String sasl_encryption = "";
String sasl_protocol = "";
String sasl_server = "";
String ssl = "";
String ssl_verify_hostname = "";
String ssl_cert_alias = "";
String trust_store = "";
String trust_store_password = "";
String key_store = "";
String key_store_password = "";
Map options = new AddressParser(opts).map();
if (options.containsKey("maxprefetch"))
{
maxprefetch = "&maxprefetch='" + options.get("maxprefetch").toString() + "'";
}
if (options.containsKey("sync_publish"))
{
sync_publish = "&sync_publish='" + options.get("sync_publish").toString() + "'";
}
if (options.containsKey("sync_ack"))
{
sync_ack = "&sync_ack='" + options.get("sync_ack").toString() + "'";
}
if (options.containsKey("use_legacy_map_msg_format"))
{
use_legacy_map_msg_format = "&use_legacy_map_msg_format='" +
options.get("use_legacy_map_msg_format").toString() + "'";
}
if (options.containsKey("failover"))
{
if (options.containsKey("cyclecount"))
{
failover = "&failover='" + options.get("failover").toString() + "?cyclecount='" +
options.get("cyclecount").toString() + "''";
}
else
{
failover = "&failover='" + options.get("failover").toString() + "'";
}
}
if (options.containsKey("username"))
{
username = options.get("username").toString();
}
if (options.containsKey("password"))
{
password = options.get("password").toString();
}
if (options.containsKey("reconnect"))
{
String value = options.get("reconnect").toString();
if (value.equalsIgnoreCase("true"))
{
retries = "&retries='" + Integer.MAX_VALUE + "'";
connectdelay = "&connectdelay='5000'";
}
}
if (options.containsKey("reconnect_limit"))
{
retries = "&retries='" + options.get("reconnect_limit").toString() + "'";
}
if (options.containsKey("reconnect_interval"))
{
connectdelay = "&connectdelay='" + options.get("reconnect_interval").toString() + "000'";
}
if (options.containsKey("reconnect_interval_min"))
{
connectdelay = "&connectdelay='" + options.get("reconnect_interval_min").toString() + "000'";
}
if (options.containsKey("reconnect_interval_max"))
{
connectdelay = "&connectdelay='" + options.get("reconnect_interval_max").toString() + "000'";
}
if (options.containsKey("reconnect_timeout"))
{
connecttimeout = "&connecttimeout='" + options.get("reconnect_timeout").toString() + "000'";
}
if (options.containsKey("heartbeat"))
{
heartbeat = "&heartbeat='" + options.get("heartbeat").toString() + "'";
}
if (options.containsKey("tcp-nodelay"))
{
tcp_nodelay = "&tcp_nodelay='" + options.get("tcp-nodelay").toString() + "'";
}
if (options.containsKey("sasl_mechanisms"))
{
sasl_mechs = "&sasl_mechs='" + options.get("sasl_mechanisms").toString() + "'";
}
if (options.containsKey("sasl_mechs"))
{
sasl_mechs = "&sasl_mechs='" + options.get("sasl_mechs").toString() + "'";
}
if (options.containsKey("sasl_encryption"))
{
sasl_encryption = "&sasl_encryption='" + options.get("sasl_encryption").toString() + "'";
}
if (options.containsKey("sasl_protocol"))
{
sasl_protocol = "&sasl_protocol='" + options.get("sasl_protocol").toString() + "'";
}
if (options.containsKey("sasl_server"))
{
sasl_server = "&sasl_server='" + options.get("sasl_server").toString() + "'";
}
if (options.containsKey("trust_store"))
{
trust_store = "&trust_store='" + options.get("trust_store").toString() + "'";
}
if (options.containsKey("trust_store_password"))
{
trust_store_password = "&trust_store_password='" + options.get("trust_store_password").toString() + "'";
}
if (options.containsKey("key_store"))
{
key_store = "&key_store='" + options.get("key_store").toString() + "'";
}
if (options.containsKey("key_store_password"))
{
key_store_password = "&key_store_password='" + options.get("key_store_password").toString() + "'";
}
if (options.containsKey("protocol"))
{
String value = options.get("protocol").toString();
if (value.equalsIgnoreCase("ssl"))
{
ssl = "&ssl='true'";
if (options.containsKey("ssl_verify_hostname"))
{
ssl_verify_hostname = "&ssl_verify_hostname='" + options.get("ssl_verify_hostname").toString() + "'";
}
if (options.containsKey("ssl_cert_alias"))
{
ssl_cert_alias = "&ssl_cert_alias='" + options.get("ssl_cert_alias").toString() + "'";
}
}
}
urlOptions = maxprefetch + sync_publish + sync_ack + use_legacy_map_msg_format + failover;
brokerListOptions = heartbeat + retries + connectdelay + connecttimeout + tcp_nodelay +
sasl_mechs + sasl_encryption + sasl_protocol + sasl_server +
ssl + ssl_verify_hostname + ssl_cert_alias +
trust_store + trust_store_password + key_store + key_store_password;
if (brokerListOptions.startsWith("&"))
{
brokerListOptions = "?" + brokerListOptions.substring(1);
}
}
return parseURL(url, username, password, urlOptions, brokerListOptions);
}
/**
 * Creates a JMS Connection from one of the supported URL formats.
 * <p>
 * Equivalent to invoking the two-argument overload with a {@code null} options string.
 *
 * @param url an AMQP 0.10 URL, an extended AMQP 0-10 URL, a Broker URL or a Connection URL.
 * @return a javax.jms.Connection.
 */
public static Connection createConnection(String url)
{
    // Delegate to the (url, opts) overload; no extra options are supplied.
    return createConnection(url, (String) null);
}
/**
 * Creates a JMS Connection from one of the supported URL formats plus options.
 *
 * @param url an AMQP 0.10 URL, an extended AMQP 0-10 URL, a Broker URL or a Connection URL.
 * @param opts a String containing the options encoded using the same form as the C++ qpid::messaging
 * Connection class.
 * @return a javax.jms.Connection, or {@code null} if the lookup or connection attempt fails.
 */
public static Connection createConnection(String url, String opts)
{
    // Translate the incoming URL plus option string into a single ConnectionFactory URL.
    String factoryUrl = createConnectionURL(url, opts);
    _log.info("ConnectionHelper.createConnection() {}", factoryUrl);
    // Register the factory URL under a well-known JNDI name so the standard
    // InitialContext lookup machinery can materialise the ConnectionFactory.
    Properties jndiProps = new Properties();
    jndiProps.setProperty("java.naming.factory.initial", "org.apache.qpid.jndi.PropertiesFileInitialContextFactory");
    jndiProps.setProperty("connectionfactory.ConnectionFactory", factoryUrl);
    Connection result = null;
    try
    {
        Context context = new InitialContext(jndiProps);
        ConnectionFactory factory = (ConnectionFactory) context.lookup("ConnectionFactory");
        result = factory.createConnection();
    }
    catch (NamingException ne)
    {
        _log.info("NamingException {} caught in createConnection()", ne.getMessage());
    }
    catch (JMSException jmse)
    {
        _log.info("JMSException {} caught in createConnection()", jmse.getMessage());
    }
    // NOTE(review): failures are logged at info level and swallowed, so callers
    // receive null on error and must null-check the returned Connection.
    return result;
}
}
|
google/guava | 35,968 | android/guava-tests/test/com/google/common/collect/MapMakerInternalMapTest.java | /*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import static com.google.common.collect.MapMakerInternalMap.DRAIN_THRESHOLD;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Equivalence;
import com.google.common.collect.MapMakerInternalMap.InternalEntry;
import com.google.common.collect.MapMakerInternalMap.Segment;
import com.google.common.collect.MapMakerInternalMap.Strength;
import com.google.common.collect.MapMakerInternalMap.WeakValueEntry;
import com.google.common.collect.MapMakerInternalMap.WeakValueReference;
import com.google.common.testing.NullPointerTester;
import java.lang.ref.Reference;
import java.util.concurrent.atomic.AtomicReferenceArray;
import junit.framework.TestCase;
import org.jspecify.annotations.NullUnmarked;
/**
* @author Charles Fry
*/
@SuppressWarnings("deprecation") // many tests of deprecated methods
@NullUnmarked
public class MapMakerInternalMapTest extends TestCase {
static final int SMALL_MAX_SIZE = DRAIN_THRESHOLD * 5;
private static <K, V>
MapMakerInternalMap<K, V, ? extends InternalEntry<K, V, ?>, ? extends Segment<K, V, ?, ?>>
makeMap(MapMaker maker) {
return MapMakerInternalMap.create(maker);
}
private static MapMaker createMapMaker() {
MapMaker maker = new MapMaker();
maker.useCustomMap = true;
return maker;
}
// constructor tests
public void testDefaults() {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker());
assertSame(Strength.STRONG, map.keyStrength());
assertSame(Strength.STRONG, map.valueStrength());
assertSame(map.keyStrength().defaultEquivalence(), map.keyEquivalence);
assertSame(map.valueStrength().defaultEquivalence(), map.valueEquivalence());
assertThat(map.entryHelper)
.isInstanceOf(MapMakerInternalMap.StrongKeyStrongValueEntry.Helper.class);
assertEquals(4, map.concurrencyLevel);
// concurrency level
assertThat(map.segments).hasLength(4);
// initial capacity / concurrency level
assertEquals(16 / map.segments.length, map.segments[0].table.length());
}
public void testSetKeyEquivalence() {
Equivalence<Object> testEquivalence =
new Equivalence<Object>() {
@Override
protected boolean doEquivalent(Object a, Object b) {
return false;
}
@Override
protected int doHash(Object t) {
return 0;
}
};
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().keyEquivalence(testEquivalence));
assertSame(testEquivalence, map.keyEquivalence);
assertSame(map.valueStrength().defaultEquivalence(), map.valueEquivalence());
}
public void testSetConcurrencyLevel() {
// round up to the nearest power of two
checkConcurrencyLevel(1, 1);
checkConcurrencyLevel(2, 2);
checkConcurrencyLevel(3, 4);
checkConcurrencyLevel(4, 4);
checkConcurrencyLevel(5, 8);
checkConcurrencyLevel(6, 8);
checkConcurrencyLevel(7, 8);
checkConcurrencyLevel(8, 8);
}
private static void checkConcurrencyLevel(int concurrencyLevel, int segmentCount) {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(concurrencyLevel));
assertThat(map.segments).hasLength(segmentCount);
}
public void testSetInitialCapacity() {
// share capacity over each segment, then round up to the nearest power of two
checkInitialCapacity(1, 0, 1);
checkInitialCapacity(1, 1, 1);
checkInitialCapacity(1, 2, 2);
checkInitialCapacity(1, 3, 4);
checkInitialCapacity(1, 4, 4);
checkInitialCapacity(1, 5, 8);
checkInitialCapacity(1, 6, 8);
checkInitialCapacity(1, 7, 8);
checkInitialCapacity(1, 8, 8);
checkInitialCapacity(2, 0, 1);
checkInitialCapacity(2, 1, 1);
checkInitialCapacity(2, 2, 1);
checkInitialCapacity(2, 3, 2);
checkInitialCapacity(2, 4, 2);
checkInitialCapacity(2, 5, 4);
checkInitialCapacity(2, 6, 4);
checkInitialCapacity(2, 7, 4);
checkInitialCapacity(2, 8, 4);
checkInitialCapacity(4, 0, 1);
checkInitialCapacity(4, 1, 1);
checkInitialCapacity(4, 2, 1);
checkInitialCapacity(4, 3, 1);
checkInitialCapacity(4, 4, 1);
checkInitialCapacity(4, 5, 2);
checkInitialCapacity(4, 6, 2);
checkInitialCapacity(4, 7, 2);
checkInitialCapacity(4, 8, 2);
}
private static void checkInitialCapacity(
int concurrencyLevel, int initialCapacity, int segmentSize) {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(
createMapMaker().concurrencyLevel(concurrencyLevel).initialCapacity(initialCapacity));
for (int i = 0; i < map.segments.length; i++) {
assertEquals(segmentSize, map.segments[i].table.length());
}
}
public void testSetWeakKeys() {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker().weakKeys());
checkStrength(map, Strength.WEAK, Strength.STRONG);
assertThat(map.entryHelper)
.isInstanceOf(MapMakerInternalMap.WeakKeyStrongValueEntry.Helper.class);
}
public void testSetWeakValues() {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker().weakValues());
checkStrength(map, Strength.STRONG, Strength.WEAK);
assertThat(map.entryHelper)
.isInstanceOf(MapMakerInternalMap.StrongKeyWeakValueEntry.Helper.class);
}
private static void checkStrength(
MapMakerInternalMap<Object, Object, ?, ?> map, Strength keyStrength, Strength valueStrength) {
assertSame(keyStrength, map.keyStrength());
assertSame(valueStrength, map.valueStrength());
assertSame(keyStrength.defaultEquivalence(), map.keyEquivalence);
assertSame(valueStrength.defaultEquivalence(), map.valueEquivalence());
}
// Segment core tests
public void testNewEntry() {
for (MapMaker maker : allWeakValueStrengthMakers()) {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker);
Segment<Object, Object, ?, ?> segment = map.segments[0];
Object keyOne = new Object();
Object valueOne = new Object();
int hashOne = map.hash(keyOne);
InternalEntry<Object, Object, ?> entryOne = segment.newEntryForTesting(keyOne, hashOne, null);
WeakValueReference<Object, Object, ?> valueRefOne =
segment.newWeakValueReferenceForTesting(entryOne, valueOne);
assertSame(valueOne, valueRefOne.get());
segment.setWeakValueReferenceForTesting(entryOne, valueRefOne);
assertSame(keyOne, entryOne.getKey());
assertEquals(hashOne, entryOne.getHash());
assertNull(entryOne.getNext());
assertSame(valueRefOne, segment.getWeakValueReferenceForTesting(entryOne));
Object keyTwo = new Object();
Object valueTwo = new Object();
int hashTwo = map.hash(keyTwo);
InternalEntry<Object, Object, ?> entryTwo =
segment.newEntryForTesting(keyTwo, hashTwo, entryOne);
WeakValueReference<Object, Object, ?> valueRefTwo =
segment.newWeakValueReferenceForTesting(entryTwo, valueTwo);
assertSame(valueTwo, valueRefTwo.get());
segment.setWeakValueReferenceForTesting(entryTwo, valueRefTwo);
assertSame(keyTwo, entryTwo.getKey());
assertEquals(hashTwo, entryTwo.getHash());
assertSame(entryOne, entryTwo.getNext());
assertSame(valueRefTwo, segment.getWeakValueReferenceForTesting(entryTwo));
}
}
public void testCopyEntry() {
for (MapMaker maker : allWeakValueStrengthMakers()) {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker);
Segment<Object, Object, ?, ?> segment = map.segments[0];
Object keyOne = new Object();
Object valueOne = new Object();
int hashOne = map.hash(keyOne);
InternalEntry<Object, Object, ?> entryOne = segment.newEntryForTesting(keyOne, hashOne, null);
segment.setValueForTesting(entryOne, valueOne);
Object keyTwo = new Object();
Object valueTwo = new Object();
int hashTwo = map.hash(keyTwo);
InternalEntry<Object, Object, ?> entryTwo = segment.newEntryForTesting(keyTwo, hashTwo, null);
segment.setValueForTesting(entryTwo, valueTwo);
InternalEntry<Object, Object, ?> copyOne = segment.copyForTesting(entryOne, null);
assertSame(keyOne, entryOne.getKey());
assertEquals(hashOne, entryOne.getHash());
assertNull(entryOne.getNext());
assertSame(valueOne, copyOne.getValue());
InternalEntry<Object, Object, ?> copyTwo = segment.copyForTesting(entryTwo, copyOne);
assertSame(keyTwo, copyTwo.getKey());
assertEquals(hashTwo, copyTwo.getHash());
assertSame(copyOne, copyTwo.getNext());
assertSame(valueTwo, copyTwo.getValue());
}
}
public void testSegmentGetAndContains() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
// TODO(fry): check recency ordering
Object key = new Object();
int hash = map.hash(key);
Object value = new Object();
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
int index = hash & (table.length() - 1);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
segment.setValueForTesting(entry, value);
assertNull(segment.get(key, hash));
// count == 0
segment.setTableEntryForTesting(index, entry);
assertNull(segment.get(key, hash));
assertFalse(segment.containsKey(key, hash));
assertFalse(segment.containsValue(value));
// count == 1
segment.count++;
assertSame(value, segment.get(key, hash));
assertTrue(segment.containsKey(key, hash));
assertTrue(segment.containsValue(value));
// don't see absent values now that count > 0
assertNull(segment.get(new Object(), hash));
// null key
InternalEntry<Object, Object, ?> nullEntry = segment.newEntryForTesting(null, hash, entry);
Object nullValue = new Object();
WeakValueReference<Object, Object, ?> nullValueRef =
segment.newWeakValueReferenceForTesting(nullEntry, nullValue);
segment.setWeakValueReferenceForTesting(nullEntry, nullValueRef);
segment.setTableEntryForTesting(index, nullEntry);
// skip the null key
assertSame(value, segment.get(key, hash));
assertTrue(segment.containsKey(key, hash));
assertTrue(segment.containsValue(value));
assertFalse(segment.containsValue(nullValue));
// hash collision
InternalEntry<Object, Object, ?> dummyEntry =
segment.newEntryForTesting(new Object(), hash, entry);
Object dummyValue = new Object();
WeakValueReference<Object, Object, ?> dummyValueRef =
segment.newWeakValueReferenceForTesting(dummyEntry, dummyValue);
segment.setWeakValueReferenceForTesting(dummyEntry, dummyValueRef);
segment.setTableEntryForTesting(index, dummyEntry);
assertSame(value, segment.get(key, hash));
assertTrue(segment.containsKey(key, hash));
assertTrue(segment.containsValue(value));
assertTrue(segment.containsValue(dummyValue));
// key collision
dummyEntry = segment.newEntryForTesting(key, hash, entry);
dummyValue = new Object();
dummyValueRef = segment.newWeakValueReferenceForTesting(dummyEntry, dummyValue);
segment.setWeakValueReferenceForTesting(dummyEntry, dummyValueRef);
segment.setTableEntryForTesting(index, dummyEntry);
// returns the most recent entry
assertSame(dummyValue, segment.get(key, hash));
assertTrue(segment.containsKey(key, hash));
assertTrue(segment.containsValue(value));
assertTrue(segment.containsValue(dummyValue));
}
public void testSegmentReplaceValue() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
// TODO(fry): check recency ordering
Object key = new Object();
int hash = map.hash(key);
Object oldValue = new Object();
Object newValue = new Object();
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
int index = hash & (table.length() - 1);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
WeakValueReference<Object, Object, ?> oldValueRef =
segment.newWeakValueReferenceForTesting(entry, oldValue);
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
// no entry
assertFalse(segment.replace(key, hash, oldValue, newValue));
assertEquals(0, segment.count);
// same value
segment.setTableEntryForTesting(index, entry);
segment.count++;
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
assertTrue(segment.replace(key, hash, oldValue, newValue));
assertEquals(1, segment.count);
assertSame(newValue, segment.get(key, hash));
// different value
assertFalse(segment.replace(key, hash, oldValue, newValue));
assertEquals(1, segment.count);
assertSame(newValue, segment.get(key, hash));
// cleared
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
oldValueRef.clear();
assertFalse(segment.replace(key, hash, oldValue, newValue));
assertEquals(0, segment.count);
assertNull(segment.get(key, hash));
}
public void testSegmentReplace() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
// TODO(fry): check recency ordering
Object key = new Object();
int hash = map.hash(key);
Object oldValue = new Object();
Object newValue = new Object();
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
int index = hash & (table.length() - 1);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
WeakValueReference<Object, Object, ?> oldValueRef =
segment.newWeakValueReferenceForTesting(entry, oldValue);
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
// no entry
assertNull(segment.replace(key, hash, newValue));
assertEquals(0, segment.count);
// same key
segment.setTableEntryForTesting(index, entry);
segment.count++;
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
assertSame(oldValue, segment.replace(key, hash, newValue));
assertEquals(1, segment.count);
assertSame(newValue, segment.get(key, hash));
// cleared
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
oldValueRef.clear();
assertNull(segment.replace(key, hash, newValue));
assertEquals(0, segment.count);
assertNull(segment.get(key, hash));
}
public void testSegmentPut() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
// TODO(fry): check recency ordering
Object key = new Object();
int hash = map.hash(key);
Object oldValue = new Object();
Object newValue = new Object();
// no entry
assertEquals(0, segment.count);
assertNull(segment.put(key, hash, oldValue, false));
assertEquals(1, segment.count);
// same key
assertSame(oldValue, segment.put(key, hash, newValue, false));
assertEquals(1, segment.count);
assertSame(newValue, segment.get(key, hash));
// cleared
InternalEntry<Object, Object, ?> entry = segment.getEntry(key, hash);
WeakValueReference<Object, Object, ?> oldValueRef =
segment.newWeakValueReferenceForTesting(entry, oldValue);
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
assertSame(oldValue, segment.get(key, hash));
oldValueRef.clear();
assertNull(segment.put(key, hash, newValue, false));
assertEquals(1, segment.count);
assertSame(newValue, segment.get(key, hash));
}
public void testSegmentPutIfAbsent() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
// TODO(fry): check recency ordering
Object key = new Object();
int hash = map.hash(key);
Object oldValue = new Object();
Object newValue = new Object();
// no entry
assertEquals(0, segment.count);
assertNull(segment.put(key, hash, oldValue, true));
assertEquals(1, segment.count);
// same key
assertSame(oldValue, segment.put(key, hash, newValue, true));
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
// cleared
InternalEntry<Object, Object, ?> entry = segment.getEntry(key, hash);
WeakValueReference<Object, Object, ?> oldValueRef =
segment.newWeakValueReferenceForTesting(entry, oldValue);
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
assertSame(oldValue, segment.get(key, hash));
oldValueRef.clear();
assertNull(segment.put(key, hash, newValue, true));
assertEquals(1, segment.count);
assertSame(newValue, segment.get(key, hash));
}
public void testSegmentPut_expand() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
Segment<Object, Object, ?, ?> segment = map.segments[0];
assertEquals(1, segment.table.length());
int count = 1024;
for (int i = 0; i < count; i++) {
Object key = new Object();
Object value = new Object();
int hash = map.hash(key);
assertNull(segment.put(key, hash, value, false));
assertTrue(segment.table.length() > i);
}
}
public void testSegmentRemove() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
Object key = new Object();
int hash = map.hash(key);
Object oldValue = new Object();
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
int index = hash & (table.length() - 1);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
WeakValueReference<Object, Object, ?> oldValueRef =
segment.newWeakValueReferenceForTesting(entry, oldValue);
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
// no entry
assertEquals(0, segment.count);
assertNull(segment.remove(key, hash));
assertEquals(0, segment.count);
// same key
segment.setTableEntryForTesting(index, entry);
segment.count++;
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
assertSame(oldValue, segment.remove(key, hash));
assertEquals(0, segment.count);
assertNull(segment.get(key, hash));
// cleared
segment.setTableEntryForTesting(index, entry);
segment.count++;
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
oldValueRef.clear();
assertNull(segment.remove(key, hash));
assertEquals(0, segment.count);
assertNull(segment.get(key, hash));
}
public void testSegmentRemoveValue() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
Object key = new Object();
int hash = map.hash(key);
Object oldValue = new Object();
Object newValue = new Object();
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
int index = hash & (table.length() - 1);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
WeakValueReference<Object, Object, ?> oldValueRef =
segment.newWeakValueReferenceForTesting(entry, oldValue);
segment.setWeakValueReferenceForTesting(entry, oldValueRef);
// no entry
assertEquals(0, segment.count);
assertNull(segment.remove(key, hash));
assertEquals(0, segment.count);
// same value
segment.setTableEntryForTesting(index, entry);
segment.count++;
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
assertTrue(segment.remove(key, hash, oldValue));
assertEquals(0, segment.count);
assertNull(segment.get(key, hash));
// different value
segment.setTableEntryForTesting(index, entry);
segment.count++;
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
assertFalse(segment.remove(key, hash, newValue));
assertEquals(1, segment.count);
assertSame(oldValue, segment.get(key, hash));
// cleared
assertSame(oldValue, segment.get(key, hash));
oldValueRef.clear();
assertFalse(segment.remove(key, hash, oldValue));
assertEquals(0, segment.count);
assertNull(segment.get(key, hash));
}
@SuppressWarnings("GuardedBy")
public void testExpand() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
Segment<Object, Object, ?, ?> segment = map.segments[0];
assertEquals(1, segment.table.length());
// manually add elements to avoid expansion
int originalCount = 1024;
InternalEntry<Object, Object, ?> entry = null;
for (int i = 0; i < originalCount; i++) {
Object key = new Object();
Object value = new Object();
int hash = map.hash(key);
// chain all entries together as we only have a single bucket
entry = segment.newEntryForTesting(key, hash, entry);
segment.setValueForTesting(entry, value);
}
segment.setTableEntryForTesting(0, entry);
segment.count = originalCount;
ImmutableMap<Object, Object> originalMap = ImmutableMap.copyOf(map);
assertEquals(originalCount, originalMap.size());
assertEquals(originalMap, map);
for (int i = 1; i <= originalCount * 2; i *= 2) {
if (i > 1) {
// TODO(b/145386688): This access should be guarded by 'segment', which is not currently
// held
segment.expand();
}
assertEquals(i, segment.table.length());
assertEquals(originalCount, countLiveEntries(map));
assertEquals(originalCount, segment.count);
assertEquals(originalMap, map);
}
}
public void testRemoveFromChain() {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(createMapMaker().concurrencyLevel(1));
Segment<Object, Object, ?, ?> segment = map.segments[0];
// create 3 objects and chain them together
Object keyOne = new Object();
Object valueOne = new Object();
int hashOne = map.hash(keyOne);
InternalEntry<Object, Object, ?> entryOne = segment.newEntryForTesting(keyOne, hashOne, null);
segment.setValueForTesting(entryOne, valueOne);
Object keyTwo = new Object();
Object valueTwo = new Object();
int hashTwo = map.hash(keyTwo);
InternalEntry<Object, Object, ?> entryTwo =
segment.newEntryForTesting(keyTwo, hashTwo, entryOne);
segment.setValueForTesting(entryTwo, valueTwo);
Object keyThree = new Object();
Object valueThree = new Object();
int hashThree = map.hash(keyThree);
InternalEntry<Object, Object, ?> entryThree =
segment.newEntryForTesting(keyThree, hashThree, entryTwo);
segment.setValueForTesting(entryThree, valueThree);
// alone
assertNull(segment.removeFromChainForTesting(entryOne, entryOne));
// head
assertSame(entryOne, segment.removeFromChainForTesting(entryTwo, entryTwo));
// middle
InternalEntry<Object, Object, ?> newFirst =
segment.removeFromChainForTesting(entryThree, entryTwo);
assertSame(keyThree, newFirst.getKey());
assertSame(valueThree, newFirst.getValue());
assertEquals(hashThree, newFirst.getHash());
assertSame(entryOne, newFirst.getNext());
// tail (remaining entries are copied in reverse order)
newFirst = segment.removeFromChainForTesting(entryThree, entryOne);
assertSame(keyTwo, newFirst.getKey());
assertSame(valueTwo, newFirst.getValue());
assertEquals(hashTwo, newFirst.getHash());
newFirst = newFirst.getNext();
assertSame(keyThree, newFirst.getKey());
assertSame(valueThree, newFirst.getValue());
assertEquals(hashThree, newFirst.getHash());
assertNull(newFirst.getNext());
}
@SuppressWarnings("GuardedBy")
public void testExpand_cleanup() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
Segment<Object, Object, ?, ?> segment = map.segments[0];
assertEquals(1, segment.table.length());
// manually add elements to avoid expansion
// 1/3 null keys, 1/3 null values
int originalCount = 1024;
InternalEntry<Object, Object, ?> entry = null;
for (int i = 0; i < originalCount; i++) {
Object key = new Object();
Object value = (i % 3 == 0) ? null : new Object();
int hash = map.hash(key);
if (i % 3 == 1) {
key = null;
}
// chain all entries together as we only have a single bucket
entry = segment.newEntryForTesting(key, hash, entry);
segment.setValueForTesting(entry, value);
}
segment.setTableEntryForTesting(0, entry);
segment.count = originalCount;
int liveCount = originalCount / 3;
assertEquals(1, segment.table.length());
assertEquals(liveCount, countLiveEntries(map));
ImmutableMap<Object, Object> originalMap = ImmutableMap.copyOf(map);
assertEquals(liveCount, originalMap.size());
// can't compare map contents until cleanup occurs
for (int i = 1; i <= originalCount * 2; i *= 2) {
if (i > 1) {
// TODO(b/145386688): This access should be guarded by 'segment', which is not currently
// held
segment.expand();
}
assertEquals(i, segment.table.length());
assertEquals(liveCount, countLiveEntries(map));
// expansion cleanup is sloppy, with a goal of avoiding unnecessary copies
assertTrue(segment.count >= liveCount);
assertTrue(segment.count <= originalCount);
assertEquals(originalMap, ImmutableMap.copyOf(map));
}
}
private static <K, V> int countLiveEntries(MapMakerInternalMap<K, V, ?, ?> map) {
int result = 0;
for (Segment<K, V, ?, ?> segment : map.segments) {
AtomicReferenceArray<? extends InternalEntry<K, V, ?>> table = segment.table;
for (int i = 0; i < table.length(); i++) {
for (InternalEntry<K, V, ?> e = table.get(i); e != null; e = e.getNext()) {
if (map.isLiveForTesting(e)) {
result++;
}
}
}
}
return result;
}
public void testClear() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
Segment<Object, Object, ?, ?> segment = map.segments[0];
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
assertEquals(1, table.length());
Object key = new Object();
Object value = new Object();
int hash = map.hash(key);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
segment.setValueForTesting(entry, value);
segment.setTableEntryForTesting(0, entry);
segment.readCount.incrementAndGet();
segment.count = 1;
assertSame(entry, table.get(0));
segment.clear();
assertNull(table.get(0));
assertEquals(0, segment.readCount.get());
assertEquals(0, segment.count);
}
public void testRemoveEntry() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1));
Segment<Object, Object, ?, ?> segment = map.segments[0];
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
assertEquals(1, table.length());
Object key = new Object();
Object value = new Object();
int hash = map.hash(key);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
segment.setValueForTesting(entry, value);
// remove absent
assertFalse(segment.removeTableEntryForTesting(entry));
segment.setTableEntryForTesting(0, entry);
segment.count = 1;
assertTrue(segment.removeTableEntryForTesting(entry));
assertEquals(0, segment.count);
assertNull(table.get(0));
}
public void testClearValue() {
MapMakerInternalMap<Object, Object, ?, ?> map =
makeMap(createMapMaker().concurrencyLevel(1).initialCapacity(1).weakValues());
Segment<Object, Object, ?, ?> segment = map.segments[0];
AtomicReferenceArray<? extends InternalEntry<Object, Object, ?>> table = segment.table;
assertEquals(1, table.length());
Object key = new Object();
Object value = new Object();
int hash = map.hash(key);
InternalEntry<Object, Object, ?> entry = segment.newEntryForTesting(key, hash, null);
segment.setValueForTesting(entry, value);
WeakValueReference<Object, Object, ?> valueRef = segment.getWeakValueReferenceForTesting(entry);
// clear absent
assertFalse(segment.clearValueForTesting(key, hash, valueRef));
segment.setTableEntryForTesting(0, entry);
// don't increment count; this is used during computation
assertTrue(segment.clearValueForTesting(key, hash, valueRef));
// no notification sent with clearValue
assertEquals(0, segment.count);
assertNull(table.get(0));
// clear wrong value reference
segment.setTableEntryForTesting(0, entry);
WeakValueReference<Object, Object, ?> otherValueRef =
segment.newWeakValueReferenceForTesting(entry, value);
segment.setWeakValueReferenceForTesting(entry, otherValueRef);
assertFalse(segment.clearValueForTesting(key, hash, valueRef));
segment.setWeakValueReferenceForTesting(entry, valueRef);
assertTrue(segment.clearValueForTesting(key, hash, valueRef));
}
// reference queues
public void testDrainKeyReferenceQueueOnWrite() {
for (MapMaker maker : allWeakKeyStrengthMakers()) {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
if (maker.getKeyStrength() == Strength.WEAK) {
Segment<Object, Object, ?, ?> segment = map.segments[0];
Object keyOne = new Object();
int hashOne = map.hash(keyOne);
Object valueOne = new Object();
Object keyTwo = new Object();
Object valueTwo = new Object();
map.put(keyOne, valueOne);
InternalEntry<Object, Object, ?> entry = segment.getEntry(keyOne, hashOne);
@SuppressWarnings("unchecked")
Reference<Object> reference = (Reference<Object>) entry;
reference.enqueue();
map.put(keyTwo, valueTwo);
assertFalse(map.containsKey(keyOne));
assertFalse(map.containsValue(valueOne));
assertNull(map.get(keyOne));
assertEquals(1, map.size());
assertNull(segment.getKeyReferenceQueueForTesting().poll());
}
}
}
public void testDrainValueReferenceQueueOnWrite() {
for (MapMaker maker : allWeakValueStrengthMakers()) {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
if (maker.getValueStrength() == Strength.WEAK) {
Segment<Object, Object, ?, ?> segment = map.segments[0];
Object keyOne = new Object();
int hashOne = map.hash(keyOne);
Object valueOne = new Object();
Object keyTwo = new Object();
Object valueTwo = new Object();
map.put(keyOne, valueOne);
WeakValueEntry<Object, Object, ?> entry =
(WeakValueEntry<Object, Object, ?>) segment.getEntry(keyOne, hashOne);
WeakValueReference<Object, Object, ?> valueReference = entry.getValueReference();
@SuppressWarnings("unchecked")
Reference<Object> reference = (Reference<Object>) valueReference;
reference.enqueue();
map.put(keyTwo, valueTwo);
assertFalse(map.containsKey(keyOne));
assertFalse(map.containsValue(valueOne));
assertNull(map.get(keyOne));
assertEquals(1, map.size());
assertNull(segment.getValueReferenceQueueForTesting().poll());
}
}
}
public void testDrainKeyReferenceQueueOnRead() {
for (MapMaker maker : allWeakKeyStrengthMakers()) {
MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
if (maker.getKeyStrength() == Strength.WEAK) {
Segment<Object, Object, ?, ?> segment = map.segments[0];
Object keyOne = new Object();
int hashOne = map.hash(keyOne);
Object valueOne = new Object();
Object keyTwo = new Object();
map.put(keyOne, valueOne);
InternalEntry<Object, Object, ?> entry = segment.getEntry(keyOne, hashOne);
@SuppressWarnings("unchecked")
Reference<Object> reference = (Reference<Object>) entry;
reference.enqueue();
for (int i = 0; i < SMALL_MAX_SIZE; i++) {
Object unused = map.get(keyTwo);
}
assertFalse(map.containsKey(keyOne));
assertFalse(map.containsValue(valueOne));
assertNull(map.get(keyOne));
assertEquals(0, map.size());
assertNull(segment.getKeyReferenceQueueForTesting().poll());
}
}
}
  /**
   * Verifies that reads drain the value reference queue: after the first entry's weak value is
   * enqueued (simulating GC), repeated {@code get} calls must evict the stale mapping and leave
   * the reference queue empty.
   */
  public void testDrainValueReferenceQueueOnRead() {
    for (MapMaker maker : allWeakValueStrengthMakers()) {
      MapMakerInternalMap<Object, Object, ?, ?> map = makeMap(maker.concurrencyLevel(1));
      if (maker.getValueStrength() == Strength.WEAK) {
        Segment<Object, Object, ?, ?> segment = map.segments[0];
        Object keyOne = new Object();
        int hashOne = map.hash(keyOne);
        Object valueOne = new Object();
        Object keyTwo = new Object();
        map.put(keyOne, valueOne);
        WeakValueEntry<Object, Object, ?> entry =
            (WeakValueEntry<Object, Object, ?>) segment.getEntry(keyOne, hashOne);
        WeakValueReference<Object, Object, ?> valueReference = entry.getValueReference();
        @SuppressWarnings("unchecked")
        Reference<Object> reference = (Reference<Object>) valueReference;
        // Pretend the GC cleared the value by enqueueing its reference directly.
        reference.enqueue();
        // Reads drain the queue only periodically, so issue enough of them.
        for (int i = 0; i < SMALL_MAX_SIZE; i++) {
          Object unused = map.get(keyTwo);
        }
        assertFalse(map.containsKey(keyOne));
        assertFalse(map.containsValue(valueOne));
        assertNull(map.get(keyOne));
        assertEquals(0, map.size());
        assertNull(segment.getValueReferenceQueueForTesting().poll());
      }
    }
  }
// utility methods
private static Iterable<MapMaker> allWeakKeyStrengthMakers() {
return ImmutableList.of(createMapMaker().weakKeys(), createMapMaker().weakKeys().weakValues());
}
private static Iterable<MapMaker> allWeakValueStrengthMakers() {
return ImmutableList.of(
createMapMaker().weakValues(), createMapMaker().weakKeys().weakValues());
}
public void testNullParameters() throws Exception {
NullPointerTester tester = new NullPointerTester();
tester.testAllPublicInstanceMethods(makeMap(createMapMaker()));
}
}
|
googleapis/google-cloud-java | 35,673 | java-channel/proto-google-cloud-channel-v1/src/main/java/com/google/cloud/channel/v1/Sku.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/channel/v1/products.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.channel.v1;
/**
*
*
* <pre>
* Represents a product's purchasable Stock Keeping Unit (SKU).
* SKUs represent the different variations of the product. For example, Google
* Workspace Business Standard and Google Workspace Business Plus are Google
* Workspace product SKUs.
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.Sku}
*/
public final class Sku extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.channel.v1.Sku)
SkuOrBuilder {
private static final long serialVersionUID = 0L;
// Use Sku.newBuilder() to construct.
private Sku(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Sku() {
name_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Sku();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ProductsProto
.internal_static_google_cloud_channel_v1_Sku_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ProductsProto
.internal_static_google_cloud_channel_v1_Sku_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.Sku.class, com.google.cloud.channel.v1.Sku.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Resource Name of the SKU.
* Format: products/{product_id}/skus/{sku_id}
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Resource Name of the SKU.
* Format: products/{product_id}/skus/{sku_id}
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int MARKETING_INFO_FIELD_NUMBER = 2;
private com.google.cloud.channel.v1.MarketingInfo marketingInfo_;
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*
* @return Whether the marketingInfo field is set.
*/
@java.lang.Override
public boolean hasMarketingInfo() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*
* @return The marketingInfo.
*/
@java.lang.Override
public com.google.cloud.channel.v1.MarketingInfo getMarketingInfo() {
return marketingInfo_ == null
? com.google.cloud.channel.v1.MarketingInfo.getDefaultInstance()
: marketingInfo_;
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
@java.lang.Override
public com.google.cloud.channel.v1.MarketingInfoOrBuilder getMarketingInfoOrBuilder() {
return marketingInfo_ == null
? com.google.cloud.channel.v1.MarketingInfo.getDefaultInstance()
: marketingInfo_;
}
public static final int PRODUCT_FIELD_NUMBER = 3;
private com.google.cloud.channel.v1.Product product_;
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*
* @return Whether the product field is set.
*/
@java.lang.Override
public boolean hasProduct() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*
* @return The product.
*/
@java.lang.Override
public com.google.cloud.channel.v1.Product getProduct() {
return product_ == null ? com.google.cloud.channel.v1.Product.getDefaultInstance() : product_;
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
@java.lang.Override
public com.google.cloud.channel.v1.ProductOrBuilder getProductOrBuilder() {
return product_ == null ? com.google.cloud.channel.v1.Product.getDefaultInstance() : product_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getMarketingInfo());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(3, getProduct());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMarketingInfo());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getProduct());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.channel.v1.Sku)) {
return super.equals(obj);
}
com.google.cloud.channel.v1.Sku other = (com.google.cloud.channel.v1.Sku) obj;
if (!getName().equals(other.getName())) return false;
if (hasMarketingInfo() != other.hasMarketingInfo()) return false;
if (hasMarketingInfo()) {
if (!getMarketingInfo().equals(other.getMarketingInfo())) return false;
}
if (hasProduct() != other.hasProduct()) return false;
if (hasProduct()) {
if (!getProduct().equals(other.getProduct())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
if (hasMarketingInfo()) {
hash = (37 * hash) + MARKETING_INFO_FIELD_NUMBER;
hash = (53 * hash) + getMarketingInfo().hashCode();
}
if (hasProduct()) {
hash = (37 * hash) + PRODUCT_FIELD_NUMBER;
hash = (53 * hash) + getProduct().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.channel.v1.Sku parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.Sku parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.Sku parseFrom(com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.Sku parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.Sku parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.channel.v1.Sku parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.channel.v1.Sku parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.Sku parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.Sku parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.Sku parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.channel.v1.Sku parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.channel.v1.Sku parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.channel.v1.Sku prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Represents a product's purchasable Stock Keeping Unit (SKU).
* SKUs represent the different variations of the product. For example, Google
* Workspace Business Standard and Google Workspace Business Plus are Google
* Workspace product SKUs.
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.Sku}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.Sku)
com.google.cloud.channel.v1.SkuOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.channel.v1.ProductsProto
.internal_static_google_cloud_channel_v1_Sku_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.channel.v1.ProductsProto
.internal_static_google_cloud_channel_v1_Sku_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.channel.v1.Sku.class, com.google.cloud.channel.v1.Sku.Builder.class);
}
// Construct using com.google.cloud.channel.v1.Sku.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getMarketingInfoFieldBuilder();
getProductFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
name_ = "";
marketingInfo_ = null;
if (marketingInfoBuilder_ != null) {
marketingInfoBuilder_.dispose();
marketingInfoBuilder_ = null;
}
product_ = null;
if (productBuilder_ != null) {
productBuilder_.dispose();
productBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.channel.v1.ProductsProto
.internal_static_google_cloud_channel_v1_Sku_descriptor;
}
@java.lang.Override
public com.google.cloud.channel.v1.Sku getDefaultInstanceForType() {
return com.google.cloud.channel.v1.Sku.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.channel.v1.Sku build() {
com.google.cloud.channel.v1.Sku result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.channel.v1.Sku buildPartial() {
com.google.cloud.channel.v1.Sku result = new com.google.cloud.channel.v1.Sku(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.channel.v1.Sku result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.name_ = name_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.marketingInfo_ =
marketingInfoBuilder_ == null ? marketingInfo_ : marketingInfoBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.product_ = productBuilder_ == null ? product_ : productBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.channel.v1.Sku) {
return mergeFrom((com.google.cloud.channel.v1.Sku) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.channel.v1.Sku other) {
if (other == com.google.cloud.channel.v1.Sku.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasMarketingInfo()) {
mergeMarketingInfo(other.getMarketingInfo());
}
if (other.hasProduct()) {
mergeProduct(other.getProduct());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getMarketingInfoFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getProductFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Resource Name of the SKU.
* Format: products/{product_id}/skus/{sku_id}
* </pre>
*
* <code>string name = 1;</code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Resource Name of the SKU.
* Format: products/{product_id}/skus/{sku_id}
* </pre>
*
* <code>string name = 1;</code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Resource Name of the SKU.
* Format: products/{product_id}/skus/{sku_id}
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Resource Name of the SKU.
* Format: products/{product_id}/skus/{sku_id}
* </pre>
*
* <code>string name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Resource Name of the SKU.
* Format: products/{product_id}/skus/{sku_id}
* </pre>
*
* <code>string name = 1;</code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.channel.v1.MarketingInfo marketingInfo_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.MarketingInfo,
com.google.cloud.channel.v1.MarketingInfo.Builder,
com.google.cloud.channel.v1.MarketingInfoOrBuilder>
marketingInfoBuilder_;
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*
* @return Whether the marketingInfo field is set.
*/
public boolean hasMarketingInfo() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*
* @return The marketingInfo.
*/
public com.google.cloud.channel.v1.MarketingInfo getMarketingInfo() {
if (marketingInfoBuilder_ == null) {
return marketingInfo_ == null
? com.google.cloud.channel.v1.MarketingInfo.getDefaultInstance()
: marketingInfo_;
} else {
return marketingInfoBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
public Builder setMarketingInfo(com.google.cloud.channel.v1.MarketingInfo value) {
if (marketingInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
marketingInfo_ = value;
} else {
marketingInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
public Builder setMarketingInfo(
com.google.cloud.channel.v1.MarketingInfo.Builder builderForValue) {
if (marketingInfoBuilder_ == null) {
marketingInfo_ = builderForValue.build();
} else {
marketingInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
public Builder mergeMarketingInfo(com.google.cloud.channel.v1.MarketingInfo value) {
if (marketingInfoBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& marketingInfo_ != null
&& marketingInfo_ != com.google.cloud.channel.v1.MarketingInfo.getDefaultInstance()) {
getMarketingInfoBuilder().mergeFrom(value);
} else {
marketingInfo_ = value;
}
} else {
marketingInfoBuilder_.mergeFrom(value);
}
if (marketingInfo_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
public Builder clearMarketingInfo() {
bitField0_ = (bitField0_ & ~0x00000002);
marketingInfo_ = null;
if (marketingInfoBuilder_ != null) {
marketingInfoBuilder_.dispose();
marketingInfoBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
public com.google.cloud.channel.v1.MarketingInfo.Builder getMarketingInfoBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getMarketingInfoFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
public com.google.cloud.channel.v1.MarketingInfoOrBuilder getMarketingInfoOrBuilder() {
if (marketingInfoBuilder_ != null) {
return marketingInfoBuilder_.getMessageOrBuilder();
} else {
return marketingInfo_ == null
? com.google.cloud.channel.v1.MarketingInfo.getDefaultInstance()
: marketingInfo_;
}
}
/**
*
*
* <pre>
* Marketing information for the SKU.
* </pre>
*
* <code>.google.cloud.channel.v1.MarketingInfo marketing_info = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.MarketingInfo,
com.google.cloud.channel.v1.MarketingInfo.Builder,
com.google.cloud.channel.v1.MarketingInfoOrBuilder>
getMarketingInfoFieldBuilder() {
if (marketingInfoBuilder_ == null) {
marketingInfoBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.MarketingInfo,
com.google.cloud.channel.v1.MarketingInfo.Builder,
com.google.cloud.channel.v1.MarketingInfoOrBuilder>(
getMarketingInfo(), getParentForChildren(), isClean());
marketingInfo_ = null;
}
return marketingInfoBuilder_;
}
private com.google.cloud.channel.v1.Product product_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.Product,
com.google.cloud.channel.v1.Product.Builder,
com.google.cloud.channel.v1.ProductOrBuilder>
productBuilder_;
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*
* @return Whether the product field is set.
*/
public boolean hasProduct() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*
* @return The product.
*/
public com.google.cloud.channel.v1.Product getProduct() {
if (productBuilder_ == null) {
return product_ == null
? com.google.cloud.channel.v1.Product.getDefaultInstance()
: product_;
} else {
return productBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
public Builder setProduct(com.google.cloud.channel.v1.Product value) {
if (productBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
product_ = value;
} else {
productBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
public Builder setProduct(com.google.cloud.channel.v1.Product.Builder builderForValue) {
if (productBuilder_ == null) {
product_ = builderForValue.build();
} else {
productBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
public Builder mergeProduct(com.google.cloud.channel.v1.Product value) {
if (productBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& product_ != null
&& product_ != com.google.cloud.channel.v1.Product.getDefaultInstance()) {
getProductBuilder().mergeFrom(value);
} else {
product_ = value;
}
} else {
productBuilder_.mergeFrom(value);
}
if (product_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
public Builder clearProduct() {
bitField0_ = (bitField0_ & ~0x00000004);
product_ = null;
if (productBuilder_ != null) {
productBuilder_.dispose();
productBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
public com.google.cloud.channel.v1.Product.Builder getProductBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getProductFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
public com.google.cloud.channel.v1.ProductOrBuilder getProductOrBuilder() {
if (productBuilder_ != null) {
return productBuilder_.getMessageOrBuilder();
} else {
return product_ == null
? com.google.cloud.channel.v1.Product.getDefaultInstance()
: product_;
}
}
/**
*
*
* <pre>
* Product the SKU is associated with.
* </pre>
*
* <code>.google.cloud.channel.v1.Product product = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.Product,
com.google.cloud.channel.v1.Product.Builder,
com.google.cloud.channel.v1.ProductOrBuilder>
getProductFieldBuilder() {
if (productBuilder_ == null) {
productBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.Product,
com.google.cloud.channel.v1.Product.Builder,
com.google.cloud.channel.v1.ProductOrBuilder>(
getProduct(), getParentForChildren(), isClean());
product_ = null;
}
return productBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.Sku)
}
// @@protoc_insertion_point(class_scope:google.cloud.channel.v1.Sku)
private static final com.google.cloud.channel.v1.Sku DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.channel.v1.Sku();
}
public static com.google.cloud.channel.v1.Sku getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Sku> PARSER =
new com.google.protobuf.AbstractParser<Sku>() {
@java.lang.Override
public Sku parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<Sku> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Sku> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.channel.v1.Sku getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
hibernate/hibernate-orm | 34,383 | hibernate-core/src/main/java/org/hibernate/engine/spi/SessionDelegatorBaseImpl.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.engine.spi;
import jakarta.persistence.CacheRetrieveMode;
import jakarta.persistence.CacheStoreMode;
import jakarta.persistence.EntityGraph;
import jakarta.persistence.EntityManagerFactory;
import jakarta.persistence.FindOption;
import jakarta.persistence.FlushModeType;
import jakarta.persistence.LockModeType;
import jakarta.persistence.LockOption;
import jakarta.persistence.RefreshOption;
import jakarta.persistence.TypedQueryReference;
import jakarta.persistence.criteria.CriteriaDelete;
import jakarta.persistence.criteria.CriteriaQuery;
import jakarta.persistence.criteria.CriteriaSelect;
import jakarta.persistence.criteria.CriteriaUpdate;
import jakarta.persistence.metamodel.EntityType;
import jakarta.persistence.metamodel.Metamodel;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.CacheMode;
import org.hibernate.Filter;
import org.hibernate.FlushMode;
import org.hibernate.HibernateException;
import org.hibernate.IdentifierLoadAccess;
import org.hibernate.Interceptor;
import org.hibernate.LobHelper;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.MultiIdentifierLoadAccess;
import org.hibernate.NaturalIdLoadAccess;
import org.hibernate.NaturalIdMultiLoadAccess;
import org.hibernate.ReplicationMode;
import org.hibernate.SessionEventListener;
import org.hibernate.SharedSessionBuilder;
import org.hibernate.SharedStatelessSessionBuilder;
import org.hibernate.SimpleNaturalIdLoadAccess;
import org.hibernate.Transaction;
import org.hibernate.UnknownProfileException;
import org.hibernate.bytecode.enhance.spi.interceptor.SessionAssociationMarkers;
import org.hibernate.cache.spi.CacheTransactionSynchronization;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.jdbc.LobCreator;
import org.hibernate.engine.jdbc.connections.spi.JdbcConnectionAccess;
import org.hibernate.engine.jdbc.spi.JdbcCoordinator;
import org.hibernate.engine.jdbc.spi.JdbcServices;
import org.hibernate.event.monitor.spi.EventMonitor;
import org.hibernate.event.spi.EventSource;
import org.hibernate.graph.RootGraph;
import org.hibernate.graph.spi.RootGraphImplementor;
import org.hibernate.jdbc.ReturningWork;
import org.hibernate.jdbc.Work;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.procedure.ProcedureCall;
import org.hibernate.query.MutationQuery;
import org.hibernate.query.SelectionQuery;
import org.hibernate.query.criteria.HibernateCriteriaBuilder;
import org.hibernate.query.criteria.JpaCriteriaInsert;
import org.hibernate.query.spi.QueryImplementor;
import org.hibernate.query.spi.QueryProducerImplementor;
import org.hibernate.query.sql.spi.NativeQueryImplementor;
import org.hibernate.resource.jdbc.spi.JdbcSessionContext;
import org.hibernate.resource.transaction.spi.TransactionCoordinator;
import org.hibernate.stat.SessionStatistics;
import org.hibernate.type.format.FormatMapper;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
/**
* A wrapper class that delegates all method invocations to a delegate instance of
* {@link SessionImplementor}. This is useful for custom implementations of that
* API, so that only some methods need to be overridden
* <p>
* (Used by Hibernate Search).
*
* @author Sanne Grinovero
*/
@SuppressWarnings("deprecation")
public class SessionDelegatorBaseImpl implements SessionImplementor {
protected final SessionImplementor delegate;
public SessionDelegatorBaseImpl(SessionImplementor delegate) {
this.delegate = delegate;
}
/**
* Returns the delegate session.
* <p>
* @apiNote This returns a different object to the {@link #getDelegate()}
* method inherited from {@link jakarta.persistence.EntityManager}.
*
* @see SessionDelegatorBaseImpl#getDelegate()
*/
protected SessionImplementor delegate() {
return delegate;
}
@Override
public <T> T execute(Callback<T> callback) {
return delegate.execute( callback );
}
@Override
public SharedStatelessSessionBuilder statelessWithOptions() {
return delegate.statelessWithOptions();
}
@Override
public String getTenantIdentifier() {
return delegate.getTenantIdentifier();
}
@Override
public Object getTenantIdentifierValue() {
return delegate.getTenantIdentifierValue();
}
@Override
public UUID getSessionIdentifier() {
return delegate.getSessionIdentifier();
}
@Override
public JdbcConnectionAccess getJdbcConnectionAccess() {
return delegate.getJdbcConnectionAccess();
}
@Override
public EntityKey generateEntityKey(Object id, EntityPersister persister) {
return delegate.generateEntityKey( id, persister );
}
@Override
public Interceptor getInterceptor() {
return delegate.getInterceptor();
}
@Override
public boolean isTransactionInProgress() {
return delegate.isTransactionInProgress();
}
@Override
public void checkTransactionNeededForUpdateOperation(String exceptionMessage) {
delegate.checkTransactionNeededForUpdateOperation( exceptionMessage );
}
@Override
public void initializeCollection(PersistentCollection<?> collection, boolean writing) throws HibernateException {
delegate.initializeCollection( collection, writing );
}
@Override
public Object internalLoad(String entityName, Object id, boolean eager, boolean nullable) throws HibernateException {
return delegate.internalLoad( entityName, id, eager, nullable );
}
@Override
public Object immediateLoad(String entityName, Object id) throws HibernateException {
return delegate.immediateLoad( entityName, id );
}
@Override
public SessionFactoryImplementor getFactory() {
return delegate.getFactory();
}
@Override
public EntityPersister getEntityPersister(@Nullable String entityName, Object object) throws HibernateException {
return delegate.getEntityPersister( entityName, object );
}
@Override
public Object getEntityUsingInterceptor(EntityKey key) throws HibernateException {
return delegate.getEntityUsingInterceptor( key );
}
@Override
public Object getContextEntityIdentifier(Object object) {
return delegate.getContextEntityIdentifier( object );
}
@Override
public String bestGuessEntityName(Object object) {
return delegate.bestGuessEntityName( object );
}
@Override
public String guessEntityName(Object entity) throws HibernateException {
return delegate.guessEntityName( entity );
}
@Override @Deprecated
public Object instantiate(String entityName, Object id) throws HibernateException {
return delegate.instantiate( entityName, id );
}
@Override
public PersistenceContext getPersistenceContext() {
return delegate.getPersistenceContext();
}
@Override
public CacheMode getCacheMode() {
return delegate.getCacheMode();
}
@Override
public CacheRetrieveMode getCacheRetrieveMode() {
return delegate.getCacheRetrieveMode();
}
@Override
public CacheStoreMode getCacheStoreMode() {
return delegate.getCacheStoreMode();
}
@Override
public void setCacheMode(CacheMode cacheMode) {
delegate.setCacheMode( cacheMode );
}
@Override
public void setCacheStoreMode(CacheStoreMode cacheStoreMode) {
delegate.setCacheStoreMode( cacheStoreMode );
}
@Override
public void setCacheRetrieveMode(CacheRetrieveMode cacheRetrieveMode) {
delegate.setCacheRetrieveMode( cacheRetrieveMode );
}
@Override
public void setCriteriaCopyTreeEnabled(boolean jpaCriteriaCopyComplianceEnabled) {
delegate.setCriteriaCopyTreeEnabled( jpaCriteriaCopyComplianceEnabled );
}
@Override
public boolean isCriteriaCopyTreeEnabled() {
return delegate.isCriteriaCopyTreeEnabled();
}
@Override
public boolean isCriteriaPlanCacheEnabled() {
return delegate.isCriteriaPlanCacheEnabled();
}
@Override
public void setCriteriaPlanCacheEnabled(boolean jpaCriteriaCacheEnabled) {
delegate.setCriteriaPlanCacheEnabled( jpaCriteriaCacheEnabled );
}
@Override
public boolean getNativeJdbcParametersIgnored() {
return delegate.getNativeJdbcParametersIgnored();
}
@Override
public void setNativeJdbcParametersIgnored(boolean nativeJdbcParametersIgnored) {
delegate.setNativeJdbcParametersIgnored( nativeJdbcParametersIgnored );
}
@Override
public boolean isOpen() {
return delegate.isOpen();
}
@Override
public boolean isConnected() {
return delegate.isConnected();
}
@Override
public void checkOpen(boolean markForRollbackIfClosed) {
delegate.checkOpen( markForRollbackIfClosed );
}
@Override
public void markForRollbackOnly() {
delegate.markForRollbackOnly();
}
@Override
public FlushModeType getFlushMode() {
return delegate.getFlushMode();
}
@Override
public void setFlushMode(FlushModeType flushModeType) {
delegate.setFlushMode( flushModeType );
}
@Override
public void setHibernateFlushMode(FlushMode flushMode) {
delegate.setHibernateFlushMode( flushMode );
}
@Override
public FlushMode getHibernateFlushMode() {
return delegate.getHibernateFlushMode();
}
@Override
public void lock(Object entity, LockModeType lockMode) {
delegate.lock( entity, lockMode );
}
@Override
public void lock(Object entity, LockModeType lockMode, Map<String, Object> properties) {
delegate.lock( entity, lockMode, properties );
}
@Override
public void lock(Object entity, LockModeType lockMode, LockOption... options) {
delegate.lock( entity, lockMode, options );
}
@Override
public void flush() {
delegate.flush();
}
@Override
public boolean isEventSource() {
return delegate.isEventSource();
}
@Override
public EventSource asEventSource() {
return delegate.asEventSource();
}
@Override
public void afterScrollOperation() {
delegate.afterScrollOperation();
}
@Override
public TransactionCoordinator getTransactionCoordinator() {
return delegate.getTransactionCoordinator();
}
@Override
public JdbcCoordinator getJdbcCoordinator() {
return delegate.getJdbcCoordinator();
}
@Override
public JdbcServices getJdbcServices() {
return delegate.getJdbcServices();
}
@Override
public JdbcSessionContext getJdbcSessionContext() {
return delegate.getJdbcSessionContext();
}
@Override
public boolean isClosed() {
return delegate.isClosed();
}
@Override
public void checkOpen() {
delegate.checkOpen();
}
@Override
public boolean isOpenOrWaitingForAutoClose() {
return delegate.isOpenOrWaitingForAutoClose();
}
@Override
public boolean shouldAutoJoinTransaction() {
return delegate.shouldAutoJoinTransaction();
}
@Override
public boolean isAutoCloseSessionEnabled() {
return delegate.isAutoCloseSessionEnabled();
}
@Override
public LoadQueryInfluencers getLoadQueryInfluencers() {
return delegate.getLoadQueryInfluencers();
}
@Override
public ExceptionConverter getExceptionConverter() {
return delegate.getExceptionConverter();
}
@Override
public PersistenceContext getPersistenceContextInternal() {
return delegate.getPersistenceContextInternal();
}
@Override
public boolean autoFlushIfRequired(Set<String> querySpaces) throws HibernateException {
return delegate.autoFlushIfRequired( querySpaces );
}
@Override
public boolean autoFlushIfRequired(Set<String> querySpaces, boolean skipPreFlush)
throws HibernateException {
return delegate.autoFlushIfRequired( querySpaces, skipPreFlush );
}
@Override
public void autoPreFlush() {
delegate.autoPreFlush();
}
@Override
public void afterOperation(boolean success) {
delegate.afterOperation( success );
}
@Override
public SessionEventListenerManager getEventListenerManager() {
return delegate.getEventListenerManager();
}
@Override
public Transaction accessTransaction() {
return delegate.accessTransaction();
}
@Override
public Transaction getCurrentTransaction() {
return delegate.getCurrentTransaction();
}
@Override
public Transaction beginTransaction() {
return delegate.beginTransaction();
}
@Override
public Transaction getTransaction() {
return delegate.getTransaction();
}
@Override
public void startTransactionBoundary() {
delegate.startTransactionBoundary();
}
@Override
public CacheTransactionSynchronization getCacheTransactionSynchronization() {
return delegate.getCacheTransactionSynchronization();
}
@Override
public void afterTransactionBegin() {
delegate.afterTransactionBegin();
}
@Override
public void beforeTransactionCompletion() {
delegate.beforeTransactionCompletion();
}
@Override
public void afterTransactionCompletion(boolean successful, boolean delayed) {
delegate.afterTransactionCompletion( successful, delayed );
}
@Override
public void flushBeforeTransactionCompletion() {
delegate.flushBeforeTransactionCompletion();
}
@Override
public EntityManagerFactory getEntityManagerFactory() {
return delegate.getFactory();
}
@Override
public HibernateCriteriaBuilder getCriteriaBuilder() {
return delegate.getCriteriaBuilder();
}
@Override
public Metamodel getMetamodel() {
return delegate.getMetamodel();
}
@Override
public <T> RootGraphImplementor<T> createEntityGraph(Class<T> rootType) {
return delegate.createEntityGraph( rootType );
}
@Override
public RootGraphImplementor<?> createEntityGraph(String graphName) {
return delegate.createEntityGraph( graphName );
}
@Override
public <T> RootGraph<T> createEntityGraph(Class<T> rootType, String graphName) {
return delegate.createEntityGraph( rootType, graphName );
}
@Override
public RootGraphImplementor<?> getEntityGraph(String graphName) {
return delegate.getEntityGraph( graphName );
}
@Override
public <T> QueryImplementor<T> createQuery(CriteriaSelect<T> selectQuery) {
return delegate.createQuery( selectQuery );
}
@Override
public <T> List<EntityGraph<? super T>> getEntityGraphs(Class<T> entityClass) {
return delegate.getEntityGraphs( entityClass );
}
private QueryProducerImplementor queryDelegate() {
return delegate;
}
@Override
public MutationQuery createMutationQuery(@SuppressWarnings("rawtypes") CriteriaUpdate updateQuery) {
//noinspection resource
return delegate().createMutationQuery( updateQuery );
}
@Override
public MutationQuery createMutationQuery(@SuppressWarnings("rawtypes") CriteriaDelete deleteQuery) {
//noinspection resource
return delegate().createMutationQuery( deleteQuery );
}
@Override
public MutationQuery createMutationQuery(@SuppressWarnings("rawtypes") JpaCriteriaInsert insert) {
//noinspection resource
return delegate().createMutationQuery( insert );
}
@Override
public <T> QueryImplementor<T> createQuery(CriteriaQuery<T> criteriaQuery) {
return queryDelegate().createQuery( criteriaQuery );
}
@Override
public @SuppressWarnings("rawtypes") QueryImplementor createQuery(CriteriaUpdate updateQuery) {
return queryDelegate().createQuery( updateQuery );
}
@Override
public @SuppressWarnings("rawtypes") QueryImplementor createQuery(CriteriaDelete deleteQuery) {
return queryDelegate().createQuery( deleteQuery );
}
@Override
public <T> QueryImplementor<T> createQuery(TypedQueryReference<T> typedQueryReference) {
return queryDelegate().createQuery( typedQueryReference );
}
@Override
public @SuppressWarnings("rawtypes") QueryImplementor getNamedQuery(String name) {
return queryDelegate().getNamedQuery( name );
}
@Override
public @SuppressWarnings("rawtypes") NativeQueryImplementor getNamedNativeQuery(String name) {
return queryDelegate().getNamedNativeQuery( name );
}
@Override
public @SuppressWarnings("rawtypes") NativeQueryImplementor getNamedNativeQuery(String name, String resultSetMapping) {
return queryDelegate().getNamedNativeQuery( name, resultSetMapping );
}
@Override @SuppressWarnings("rawtypes")
public QueryImplementor createQuery(String queryString) {
return queryDelegate().createQuery( queryString );
}
@Override
public SelectionQuery<?> createSelectionQuery(String hqlString) {
return queryDelegate().createSelectionQuery( hqlString );
}
@Override
public <R> SelectionQuery<R> createSelectionQuery(String hqlString, Class<R> resultType) {
return queryDelegate().createSelectionQuery( hqlString, resultType );
}
@Override
public <R> SelectionQuery<R> createSelectionQuery(String hqlString, EntityGraph<R> resultGraph) {
return queryDelegate().createSelectionQuery( hqlString, resultGraph );
}
@Override
public <R> SelectionQuery<R> createSelectionQuery(CriteriaQuery<R> criteria) {
return queryDelegate().createSelectionQuery( criteria );
}
@Override
public <T> QueryImplementor<T> createQuery(String queryString, Class<T> resultType) {
return queryDelegate().createQuery( queryString, resultType );
}
@Override
public @SuppressWarnings("rawtypes") QueryImplementor createNamedQuery(String name) {
return queryDelegate().createNamedQuery( name );
}
@Override
public <T> QueryImplementor<T> createNamedQuery(String name, Class<T> resultClass) {
return queryDelegate().createNamedQuery( name, resultClass );
}
@Override
public SelectionQuery<?> createNamedSelectionQuery(String name) {
//noinspection resource
return delegate().createNamedSelectionQuery( name );
}
@Override
public <R> SelectionQuery<R> createNamedSelectionQuery(String name, Class<R> resultType) {
//noinspection resource
return delegate().createNamedSelectionQuery( name, resultType );
}
@Override
public @SuppressWarnings("rawtypes") NativeQueryImplementor createNativeQuery(String sqlString) {
return queryDelegate().createNativeQuery( sqlString );
}
@Override @SuppressWarnings({"rawtypes", "unchecked"})
//note: we're doing something a bit funny here to work around
// the clashing signatures declared by the supertypes
public NativeQueryImplementor createNativeQuery(String sqlString, Class resultClass) {
return queryDelegate().createNativeQuery( sqlString, resultClass );
}
@Override
public <T> NativeQueryImplementor<T> createNativeQuery(String sqlString, Class<T> resultClass, String tableAlias) {
return queryDelegate().createNativeQuery( sqlString, resultClass, tableAlias );
}
@Override
public @SuppressWarnings("rawtypes") NativeQueryImplementor createNativeQuery(String sqlString, String resultSetMappingName) {
return queryDelegate().createNativeQuery( sqlString, resultSetMappingName );
}
@Override
public <T> NativeQueryImplementor<T> createNativeQuery(String sqlString, String resultSetMappingName, Class<T> resultClass) {
return queryDelegate().createNativeQuery( sqlString, resultSetMappingName, resultClass );
}
@Override
public MutationQuery createMutationQuery(String statementString) {
return delegate.createMutationQuery( statementString );
}
@Override
public MutationQuery createNamedMutationQuery(String name) {
return delegate.createNamedMutationQuery( name );
}
@Override
public MutationQuery createNativeMutationQuery(String sqlString) {
return delegate.createNativeMutationQuery( sqlString );
}
@Override
public ProcedureCall createNamedStoredProcedureQuery(String name) {
return delegate.createNamedStoredProcedureQuery( name );
}
@Override
public ProcedureCall createStoredProcedureQuery(String procedureName) {
return delegate.createStoredProcedureQuery( procedureName );
}
@Override
public ProcedureCall createStoredProcedureQuery(String procedureName, Class... resultClasses) {
return delegate.createStoredProcedureQuery( procedureName, resultClasses );
}
@Override
public ProcedureCall createStoredProcedureQuery(String procedureName, String... resultSetMappings) {
return delegate.createStoredProcedureQuery( procedureName, resultSetMappings );
}
@Override
public void prepareForQueryExecution(boolean requiresTxn) {
delegate.prepareForQueryExecution( requiresTxn );
}
@Override
public void joinTransaction() {
delegate.joinTransaction();
}
@Override
public boolean isJoinedToTransaction() {
return delegate.isJoinedToTransaction();
}
@Override
public <T> T unwrap(Class<T> cls) {
return delegate.unwrap( cls );
}
/**
* This is the implementation of {@link jakarta.persistence.EntityManager#getDelegate()}.
* It returns this object and <em>not</em> what we call the "delegate" session here.
* To get the delegate session, use {@link #delegate()} instead.
*
* @see SessionDelegatorBaseImpl#delegate()
*/
@Override
public Object getDelegate() {
return this;
}
@Override
public ProcedureCall getNamedProcedureCall(String name) {
return delegate.getNamedProcedureCall( name );
}
@Override
public ProcedureCall createStoredProcedureCall(String procedureName) {
return delegate.createStoredProcedureCall( procedureName );
}
@Override
public ProcedureCall createStoredProcedureCall(String procedureName, Class<?>... resultClasses) {
return delegate.createStoredProcedureCall( procedureName, resultClasses );
}
@Override
public ProcedureCall createStoredProcedureCall(String procedureName, String... resultSetMappings) {
return delegate.createStoredProcedureCall( procedureName, resultSetMappings );
}
@Override
public SharedSessionBuilder sessionWithOptions() {
return delegate.sessionWithOptions();
}
@Override
public SessionFactoryImplementor getSessionFactory() {
return delegate.getSessionFactory();
}
@Override
public void close() throws HibernateException {
delegate.close();
}
@Override
public void cancelQuery() throws HibernateException {
delegate.cancelQuery();
}
@Override
public boolean isDirty() throws HibernateException {
return delegate.isDirty();
}
@Override
public boolean isDefaultReadOnly() {
return delegate.isDefaultReadOnly();
}
@Override
public void setDefaultReadOnly(boolean readOnly) {
delegate.setDefaultReadOnly( readOnly );
}
@Override
public Object getIdentifier(Object object) {
return delegate.getIdentifier( object );
}
@Override
public boolean contains(String entityName, Object object) {
return delegate.contains( entityName, object );
}
@Override
public boolean contains(Object object) {
return delegate.contains( object );
}
@Override
public LockModeType getLockMode(Object entity) {
return delegate.getLockMode( entity );
}
@Override
public void setProperty(String propertyName, Object value) {
delegate.setProperty( propertyName, value );
}
@Override
public Map<String, Object> getProperties() {
return delegate.getProperties();
}
@Override
public void evict(Object object) {
delegate.evict( object );
}
@Override
public void load(Object object, Object id) {
delegate.load( object, id );
}
@Override
public void replicate(Object object, ReplicationMode replicationMode) {
delegate.replicate( object, replicationMode );
}
@Override
public void replicate(String entityName, Object object, ReplicationMode replicationMode) {
delegate.replicate( entityName, object, replicationMode );
}
@Override
public <T> T merge(T object) {
return delegate.merge( object );
}
@Override
public <T> T merge(String entityName, T object) {
return delegate.merge( entityName, object );
}
@Override
public <T> T merge(T object, EntityGraph<?> loadGraph) {
return delegate.merge( object, loadGraph );
}
@Override
public void persist(Object object) {
delegate.persist( object );
}
@Override
public void remove(Object entity) {
delegate.remove( entity );
}
@Override
public <T> @Nullable T find(Class<T> entityClass, Object primaryKey) {
return delegate.find( entityClass, primaryKey );
}
@Override
public <T> @Nullable T find(Class<T> entityClass, Object primaryKey, Map<String, Object> properties) {
return delegate.find( entityClass, primaryKey, properties );
}
@Override
public <T> @Nullable T find(Class<T> entityClass, Object primaryKey, LockModeType lockMode) {
return delegate.find( entityClass, primaryKey, lockMode );
}
@Override
public <T> @Nullable T find(Class<T> entityClass, Object primaryKey, LockModeType lockMode, Map<String, Object> properties) {
return delegate.find( entityClass, primaryKey, lockMode, properties );
}
@Override
public <T> T find(Class<T> entityClass, Object primaryKey, FindOption... options) {
return delegate.find( entityClass, primaryKey, options );
}
@Override
public <T> T find(EntityGraph<T> entityGraph, Object primaryKey, FindOption... options) {
return delegate.find( entityGraph, primaryKey, options );
}
@Override
public Object find(String entityName, Object primaryKey) {
return delegate.find( entityName, primaryKey );
}
@Override
public Object find(String entityName, Object primaryKey, FindOption... options) {
return delegate.find( entityName, primaryKey, options );
}
@Override
public <T> T getReference(Class<T> entityClass, Object id) {
return delegate.getReference( entityClass, id );
}
@Override
public Object getReference(String entityName, Object id) {
return delegate.getReference( entityName, id );
}
@Override
public void persist(String entityName, Object object) {
delegate.persist( entityName, object );
}
@Override
public void lock(Object object, LockMode lockMode) {
delegate.lock( object, lockMode );
}
@Override
public void lock(Object object, LockMode lockMode, LockOption... lockOptions) {
delegate.lock( object, lockMode, lockOptions );
}
@Override
public void lock(String entityName, Object object, LockOptions lockOptions) {
delegate.lock( entityName, object, lockOptions );
}
@Override
public void lock(Object object, LockOptions lockOptions) {
delegate.lock( object, lockOptions );
}
@Override
public void refresh(Object object) {
delegate.refresh( object );
}
@Override
public void refresh(Object entity, Map<String, Object> properties) {
delegate.refresh( entity, properties );
}
@Override
public void refresh(Object entity, LockModeType lockMode) {
delegate.refresh( entity, lockMode );
}
@Override
public void refresh(Object entity, LockModeType lockMode, Map<String, Object> properties) {
delegate.refresh( entity, lockMode, properties );
}
@Override
public void refresh(Object entity, RefreshOption... options) {
delegate.refresh( entity, options );
}
@Override
public void refresh(Object object, LockOptions lockOptions) {
delegate.refresh( object, lockOptions );
}
@Override
public LockMode getCurrentLockMode(Object object) {
return delegate.getCurrentLockMode( object );
}
@Override
public void clear() {
delegate.clear();
}
@Override
public void detach(Object entity) {
delegate.detach( entity );
}
@Override
public <E> List<E> findMultiple(Class<E> entityType, List<?> ids, FindOption... options) {
return delegate.findMultiple( entityType, ids, options );
}
@Override
public <E> List<E> findMultiple(EntityGraph<E> entityGraph, List<?> ids, FindOption... options) {
return delegate.findMultiple( entityGraph, ids, options );
}
@Override
public <T> T get(Class<T> theClass, Object id) {
return delegate.get( theClass, id );
}
@Override
public <T> T get(Class<T> theClass, Object id, LockMode lockMode) {
return delegate.get( theClass, id, lockMode );
}
@Override
public Object get(String entityName, Object id) {
return delegate.get( entityName, id );
}
@Override
public Object get(String entityName, Object id, LockMode lockMode) {
return delegate.get( entityName, id, lockMode );
}
@Override
public <T> T get(Class<T> entityType, Object id, LockOptions lockOptions) {
return delegate.get( entityType, id, lockOptions );
}
@Override
public Object get(String entityName, Object id, LockOptions lockOptions) {
return delegate.get( entityName, id, lockOptions );
}
@Override
public String getEntityName(Object object) {
return delegate.getEntityName( object );
}
@Override
public <T> T getReference(T object) {
return delegate.getReference( object );
}
@Override
public <T> IdentifierLoadAccess<T> byId(String entityName) {
return delegate.byId( entityName );
}
@Override
public <T> MultiIdentifierLoadAccess<T> byMultipleIds(Class<T> entityClass) {
return delegate.byMultipleIds( entityClass );
}
@Override
public <T> MultiIdentifierLoadAccess<T> byMultipleIds(String entityName) {
return delegate.byMultipleIds( entityName );
}
@Override
public <T> IdentifierLoadAccess<T> byId(Class<T> entityClass) {
return delegate.byId( entityClass );
}
@Override
public <T> NaturalIdLoadAccess<T> byNaturalId(String entityName) {
return delegate.byNaturalId( entityName );
}
@Override
public <T> NaturalIdLoadAccess<T> byNaturalId(Class<T> entityClass) {
return delegate.byNaturalId( entityClass );
}
@Override
public <T> SimpleNaturalIdLoadAccess<T> bySimpleNaturalId(String entityName) {
return delegate.bySimpleNaturalId( entityName );
}
@Override
public <T> SimpleNaturalIdLoadAccess<T> bySimpleNaturalId(Class<T> entityClass) {
return delegate.bySimpleNaturalId( entityClass );
}
@Override
public <T> NaturalIdMultiLoadAccess<T> byMultipleNaturalId(Class<T> entityClass) {
return delegate.byMultipleNaturalId( entityClass );
}
@Override
public <T> NaturalIdMultiLoadAccess<T> byMultipleNaturalId(String entityName) {
return delegate.byMultipleNaturalId( entityName );
}
@Override
public Filter enableFilter(String filterName) {
return delegate.enableFilter( filterName );
}
@Override
public Filter getEnabledFilter(String filterName) {
return delegate.getEnabledFilter( filterName );
}
@Override
public void disableFilter(String filterName) {
delegate.disableFilter( filterName );
}
@Override
public SessionStatistics getStatistics() {
return delegate.getStatistics();
}
@Override
public boolean isReadOnly(Object entityOrProxy) {
return delegate.isReadOnly( entityOrProxy );
}
@Override
public void setReadOnly(Object entityOrProxy, boolean readOnly) {
delegate.setReadOnly( entityOrProxy, readOnly );
}
@Override
public void doWork(Work work) throws HibernateException {
delegate.doWork( work );
}
@Override
public <T> T doReturningWork(ReturningWork<T> work) throws HibernateException {
return delegate.doReturningWork( work );
}
@Override
public boolean isFetchProfileEnabled(String name) throws UnknownProfileException {
return delegate.isFetchProfileEnabled( name );
}
@Override
public void enableFetchProfile(String name) throws UnknownProfileException {
delegate.enableFetchProfile( name );
}
@Override
public void disableFetchProfile(String name) throws UnknownProfileException {
delegate.disableFetchProfile( name );
}
@Override
public LobHelper getLobHelper() {
return delegate.getLobHelper();
}
@Override
public Collection<?> getManagedEntities() {
return delegate.getManagedEntities();
}
@Override
public Collection<?> getManagedEntities(String entityName) {
return delegate.getManagedEntities( entityName );
}
@Override
public <E> Collection<E> getManagedEntities(Class<E> entityType) {
return delegate.getManagedEntities( entityType );
}
@Override
public <E> Collection<E> getManagedEntities(EntityType<E> entityType) {
return delegate.getManagedEntities( entityType );
}
@Override
public void addEventListeners(SessionEventListener... listeners) {
delegate.addEventListeners( listeners );
}
@Override
public ActionQueue getActionQueue() {
return delegate.getActionQueue();
}
@Override
public TransactionCompletionCallbacks getTransactionCompletionCallbacks() {
return delegate.getTransactionCompletionCallbacks();
}
@Override
public TransactionCompletionCallbacksImplementor getTransactionCompletionCallbacksImplementor() {
return delegate.getTransactionCompletionCallbacksImplementor();
}
@Override
public Object instantiate(EntityPersister persister, Object id) throws HibernateException {
return delegate.instantiate( persister, id );
}
@Override
public void forceFlush(EntityEntry e) throws HibernateException {
delegate.forceFlush( e );
}
@Override
public void forceFlush(EntityKey e) throws HibernateException {
delegate.forceFlush( e );
}
@Override
public SessionImplementor getSession() {
return this;
}
@Override
public boolean useStreamForLobBinding() {
return delegate.useStreamForLobBinding();
}
@Override
public int getPreferredSqlTypeCodeForBoolean() {
return delegate.getPreferredSqlTypeCodeForBoolean();
}
@Override
public LobCreator getLobCreator() {
return delegate.getLobCreator();
}
@Override
public Integer getJdbcBatchSize() {
return delegate.getJdbcBatchSize();
}
@Override
public EventMonitor getEventMonitor() {
return delegate.getEventMonitor();
}
@Override
public void setJdbcBatchSize(Integer jdbcBatchSize) {
delegate.setJdbcBatchSize( jdbcBatchSize );
}
@Override
public boolean isSubselectFetchingEnabled() {
return delegate.isSubselectFetchingEnabled();
}
@Override
public void setSubselectFetchingEnabled(boolean enabled) {
delegate.setSubselectFetchingEnabled( enabled );
}
@Override
public int getFetchBatchSize() {
return delegate.getFetchBatchSize();
}
@Override
public void setFetchBatchSize(int batchSize) {
delegate.setFetchBatchSize( batchSize );
}
@Override
public TimeZone getJdbcTimeZone() {
return delegate.getJdbcTimeZone();
}
@Override
public FormatMapper getJsonFormatMapper() {
return delegate.getJsonFormatMapper();
}
@Override
public FormatMapper getXmlFormatMapper() {
return delegate.getXmlFormatMapper();
}
@Override
public Object loadFromSecondLevelCache(EntityPersister persister, EntityKey entityKey, Object instanceToLoad, LockMode lockMode) {
return delegate.loadFromSecondLevelCache( persister, entityKey, instanceToLoad, lockMode );
}
@Override
public SessionAssociationMarkers getSessionAssociationMarkers() {
return delegate.getSessionAssociationMarkers();
}
@Override
public boolean isIdentifierRollbackEnabled() {
	// Delegates to the wrapped session.
	return delegate.isIdentifierRollbackEnabled();
}
@Override
public void afterObtainConnection(Connection connection) throws SQLException {
	// Delegates the post-acquisition connection callback.
	delegate.afterObtainConnection( connection );
}
@Override
public void beforeReleaseConnection(Connection connection) throws SQLException {
	// Delegates the pre-release connection callback.
	delegate.beforeReleaseConnection( connection );
}
}
|
googleapis/google-cloud-java | 35,661 | java-deploy/proto-google-cloud-deploy-v1/src/main/java/com/google/cloud/deploy/v1/DeliveryPipelineNotificationEvent.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/deploy/v1/deliverypipeline_notification_payload.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.deploy.v1;
/**
*
*
* <pre>
* Payload proto for "clouddeploy.googleapis.com/deliverypipeline_notification"
* Platform Log event that describes the failure to send delivery pipeline
* status change Pub/Sub notification.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.DeliveryPipelineNotificationEvent}
*/
public final class DeliveryPipelineNotificationEvent extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.deploy.v1.DeliveryPipelineNotificationEvent)
DeliveryPipelineNotificationEventOrBuilder {
private static final long serialVersionUID = 0L;
// Use DeliveryPipelineNotificationEvent.newBuilder() to construct.
private DeliveryPipelineNotificationEvent(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private DeliveryPipelineNotificationEvent() {
message_ = "";
pipelineUid_ = "";
deliveryPipeline_ = "";
type_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new DeliveryPipelineNotificationEvent();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.deploy.v1.DeliveryPipelineNotificationPayloadProto
.internal_static_google_cloud_deploy_v1_DeliveryPipelineNotificationEvent_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.deploy.v1.DeliveryPipelineNotificationPayloadProto
.internal_static_google_cloud_deploy_v1_DeliveryPipelineNotificationEvent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent.class,
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent.Builder.class);
}
public static final int MESSAGE_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object message_ = "";
/**
*
*
* <pre>
* Debug message for when a notification fails to send.
* </pre>
*
* <code>string message = 1;</code>
*
* @return The message.
*/
@java.lang.Override
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
message_ = s;
return s;
}
}
/**
*
*
* <pre>
* Debug message for when a notification fails to send.
* </pre>
*
* <code>string message = 1;</code>
*
* @return The bytes for message.
*/
@java.lang.Override
public com.google.protobuf.ByteString getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
message_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PIPELINE_UID_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object pipelineUid_ = "";
/**
*
*
* <pre>
* Unique identifier of the `DeliveryPipeline`.
* </pre>
*
* <code>string pipeline_uid = 4;</code>
*
* @return The pipelineUid.
*/
@java.lang.Override
public java.lang.String getPipelineUid() {
java.lang.Object ref = pipelineUid_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pipelineUid_ = s;
return s;
}
}
/**
*
*
* <pre>
* Unique identifier of the `DeliveryPipeline`.
* </pre>
*
* <code>string pipeline_uid = 4;</code>
*
* @return The bytes for pipelineUid.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPipelineUidBytes() {
java.lang.Object ref = pipelineUid_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pipelineUid_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DELIVERY_PIPELINE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object deliveryPipeline_ = "";
/**
*
*
* <pre>
* The name of the `Delivery Pipeline`.
* </pre>
*
* <code>string delivery_pipeline = 2;</code>
*
* @return The deliveryPipeline.
*/
@java.lang.Override
public java.lang.String getDeliveryPipeline() {
java.lang.Object ref = deliveryPipeline_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
deliveryPipeline_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the `Delivery Pipeline`.
* </pre>
*
* <code>string delivery_pipeline = 2;</code>
*
* @return The bytes for deliveryPipeline.
*/
@java.lang.Override
public com.google.protobuf.ByteString getDeliveryPipelineBytes() {
java.lang.Object ref = deliveryPipeline_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
deliveryPipeline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TYPE_FIELD_NUMBER = 3;
private int type_ = 0;
/**
*
*
* <pre>
* Type of this notification, e.g. for a Pub/Sub failure.
* </pre>
*
* <code>.google.cloud.deploy.v1.Type type = 3;</code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* Type of this notification, e.g. for a Pub/Sub failure.
* </pre>
*
* <code>.google.cloud.deploy.v1.Type type = 3;</code>
*
* @return The type.
*/
@java.lang.Override
public com.google.cloud.deploy.v1.Type getType() {
com.google.cloud.deploy.v1.Type result = com.google.cloud.deploy.v1.Type.forNumber(type_);
return result == null ? com.google.cloud.deploy.v1.Type.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Serializes fields in ascending field-number order (1, 2, 3, 4).
  // Proto3 semantics: scalar fields at their default value (empty string,
  // TYPE_UNSPECIFIED enum) are omitted from the wire entirely.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deliveryPipeline_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, deliveryPipeline_);
  }
  if (type_ != com.google.cloud.deploy.v1.Type.TYPE_UNSPECIFIED.getNumber()) {
    output.writeEnum(3, type_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pipelineUid_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pipelineUid_);
  }
  // Unknown fields (preserved from parsing) are re-emitted last.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized: -1 means "not yet computed". Must mirror writeTo() exactly —
  // the same fields are counted under the same default-value skip rules.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(message_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(deliveryPipeline_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, deliveryPipeline_);
  }
  if (type_ != com.google.cloud.deploy.v1.Type.TYPE_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(3, type_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pipelineUid_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pipelineUid_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Non-message (or different message) types fall back to the superclass check.
  if (!(obj instanceof com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent)) {
    return super.equals(obj);
  }
  com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent other =
      (com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent) obj;

  // Field-by-field comparison; enum compared by raw wire value so that
  // UNRECOGNIZED values with distinct numbers are not conflated.
  if (!getMessage().equals(other.getMessage())) return false;
  if (!getPipelineUid().equals(other.getPipelineUid())) return false;
  if (!getDeliveryPipeline().equals(other.getDeliveryPipeline())) return false;
  if (type_ != other.type_) return false;
  // Unknown fields participate in equality as well.
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized: 0 means "not yet computed" (messages are immutable, so the
  // hash cannot change after first computation).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Standard generated scheme: fold in the descriptor, then each field as
  // (37 * h + fieldNumber) followed by (53 * h + fieldValueHash).
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
  hash = (53 * hash) + getMessage().hashCode();
  hash = (37 * hash) + PIPELINE_UID_FIELD_NUMBER;
  hash = (53 * hash) + getPipelineUid().hashCode();
  hash = (37 * hash) + DELIVERY_PIPELINE_FIELD_NUMBER;
  hash = (53 * hash) + getDeliveryPipeline().hashCode();
  hash = (37 * hash) + TYPE_FIELD_NUMBER;
  hash = (53 * hash) + type_;
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Payload proto for "clouddeploy.googleapis.com/deliverypipeline_notification"
* Platform Log event that describes the failure to send delivery pipeline
* status change Pub/Sub notification.
* </pre>
*
* Protobuf type {@code google.cloud.deploy.v1.DeliveryPipelineNotificationEvent}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.deploy.v1.DeliveryPipelineNotificationEvent)
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEventOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.deploy.v1.DeliveryPipelineNotificationPayloadProto
.internal_static_google_cloud_deploy_v1_DeliveryPipelineNotificationEvent_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.deploy.v1.DeliveryPipelineNotificationPayloadProto
.internal_static_google_cloud_deploy_v1_DeliveryPipelineNotificationEvent_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent.class,
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent.Builder.class);
}
// Construct using com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
message_ = "";
pipelineUid_ = "";
deliveryPipeline_ = "";
type_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.deploy.v1.DeliveryPipelineNotificationPayloadProto
.internal_static_google_cloud_deploy_v1_DeliveryPipelineNotificationEvent_descriptor;
}
@java.lang.Override
public com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent
getDefaultInstanceForType() {
return com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent build() {
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent buildPartial() {
com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent result =
new com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies only the fields whose presence bits are set in the builder into
// the (still-private) result message. Bits: 0x1=message, 0x2=pipelineUid,
// 0x4=deliveryPipeline, 0x8=type.
private void buildPartial0(
    com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.message_ = message_;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.pipelineUid_ = pipelineUid_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.deliveryPipeline_ = deliveryPipeline_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.type_ = type_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent) {
return mergeFrom((com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent other) {
if (other
== com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent.getDefaultInstance())
return this;
if (!other.getMessage().isEmpty()) {
message_ = other.message_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getPipelineUid().isEmpty()) {
pipelineUid_ = other.pipelineUid_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getDeliveryPipeline().isEmpty()) {
deliveryPipeline_ = other.deliveryPipeline_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.type_ != 0) {
setTypeValue(other.getTypeValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      // Wire tag = (field_number << 3) | wire_type; tag 0 means end of stream.
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: // field 1 (message), length-delimited
          {
            message_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18: // field 2 (delivery_pipeline), length-delimited
          {
            deliveryPipeline_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 18
        case 24: // field 3 (type), varint enum
          {
            type_ = input.readEnum();
            bitField0_ |= 0x00000008;
            break;
          } // case 24
        case 34: // field 4 (pipeline_uid), length-delimited
          {
            pipelineUid_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 34
        default:
          {
            // Unrecognized tags are preserved as unknown fields; a false
            // return signals an end-group tag, which also terminates parsing.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders of the (possibly partial) mutation even on error.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.lang.Object message_ = "";
/**
*
*
* <pre>
* Debug message for when a notification fails to send.
* </pre>
*
* <code>string message = 1;</code>
*
* @return The message.
*/
public java.lang.String getMessage() {
java.lang.Object ref = message_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
message_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Debug message for when a notification fails to send.
* </pre>
*
* <code>string message = 1;</code>
*
* @return The bytes for message.
*/
public com.google.protobuf.ByteString getMessageBytes() {
java.lang.Object ref = message_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
message_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Debug message for when a notification fails to send.
* </pre>
*
* <code>string message = 1;</code>
*
* @param value The message to set.
* @return This builder for chaining.
*/
public Builder setMessage(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
message_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Debug message for when a notification fails to send.
* </pre>
*
* <code>string message = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearMessage() {
message_ = getDefaultInstance().getMessage();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Debug message for when a notification fails to send.
* </pre>
*
* <code>string message = 1;</code>
*
* @param value The bytes for message to set.
* @return This builder for chaining.
*/
public Builder setMessageBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
message_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object pipelineUid_ = "";
/**
*
*
* <pre>
* Unique identifier of the `DeliveryPipeline`.
* </pre>
*
* <code>string pipeline_uid = 4;</code>
*
* @return The pipelineUid.
*/
public java.lang.String getPipelineUid() {
java.lang.Object ref = pipelineUid_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pipelineUid_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Unique identifier of the `DeliveryPipeline`.
* </pre>
*
* <code>string pipeline_uid = 4;</code>
*
* @return The bytes for pipelineUid.
*/
public com.google.protobuf.ByteString getPipelineUidBytes() {
java.lang.Object ref = pipelineUid_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pipelineUid_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Unique identifier of the `DeliveryPipeline`.
* </pre>
*
* <code>string pipeline_uid = 4;</code>
*
* @param value The pipelineUid to set.
* @return This builder for chaining.
*/
public Builder setPipelineUid(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pipelineUid_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Unique identifier of the `DeliveryPipeline`.
* </pre>
*
* <code>string pipeline_uid = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearPipelineUid() {
pipelineUid_ = getDefaultInstance().getPipelineUid();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Unique identifier of the `DeliveryPipeline`.
* </pre>
*
* <code>string pipeline_uid = 4;</code>
*
* @param value The bytes for pipelineUid to set.
* @return This builder for chaining.
*/
public Builder setPipelineUidBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pipelineUid_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object deliveryPipeline_ = "";
/**
*
*
* <pre>
* The name of the `Delivery Pipeline`.
* </pre>
*
* <code>string delivery_pipeline = 2;</code>
*
* @return The deliveryPipeline.
*/
public java.lang.String getDeliveryPipeline() {
java.lang.Object ref = deliveryPipeline_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
deliveryPipeline_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The name of the `Delivery Pipeline`.
* </pre>
*
* <code>string delivery_pipeline = 2;</code>
*
* @return The bytes for deliveryPipeline.
*/
public com.google.protobuf.ByteString getDeliveryPipelineBytes() {
java.lang.Object ref = deliveryPipeline_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
deliveryPipeline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The name of the `Delivery Pipeline`.
* </pre>
*
* <code>string delivery_pipeline = 2;</code>
*
* @param value The deliveryPipeline to set.
* @return This builder for chaining.
*/
public Builder setDeliveryPipeline(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
deliveryPipeline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the `Delivery Pipeline`.
* </pre>
*
* <code>string delivery_pipeline = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearDeliveryPipeline() {
deliveryPipeline_ = getDefaultInstance().getDeliveryPipeline();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The name of the `Delivery Pipeline`.
* </pre>
*
* <code>string delivery_pipeline = 2;</code>
*
* @param value The bytes for deliveryPipeline to set.
* @return This builder for chaining.
*/
public Builder setDeliveryPipelineBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
deliveryPipeline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private int type_ = 0;
/**
*
*
* <pre>
* Type of this notification, e.g. for a Pub/Sub failure.
* </pre>
*
* <code>.google.cloud.deploy.v1.Type type = 3;</code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* Type of this notification, e.g. for a Pub/Sub failure.
* </pre>
*
* <code>.google.cloud.deploy.v1.Type type = 3;</code>
*
* @param value The enum numeric value on the wire for type to set.
* @return This builder for chaining.
*/
public Builder setTypeValue(int value) {
type_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Type of this notification, e.g. for a Pub/Sub failure.
* </pre>
*
* <code>.google.cloud.deploy.v1.Type type = 3;</code>
*
* @return The type.
*/
@java.lang.Override
public com.google.cloud.deploy.v1.Type getType() {
com.google.cloud.deploy.v1.Type result = com.google.cloud.deploy.v1.Type.forNumber(type_);
return result == null ? com.google.cloud.deploy.v1.Type.UNRECOGNIZED : result;
}
/**
*
*
* <pre>
* Type of this notification, e.g. for a Pub/Sub failure.
* </pre>
*
* <code>.google.cloud.deploy.v1.Type type = 3;</code>
*
* @param value The type to set.
* @return This builder for chaining.
*/
public Builder setType(com.google.cloud.deploy.v1.Type value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000008;
type_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* Type of this notification, e.g. for a Pub/Sub failure.
* </pre>
*
* <code>.google.cloud.deploy.v1.Type type = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000008);
type_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.deploy.v1.DeliveryPipelineNotificationEvent)
}
// @@protoc_insertion_point(class_scope:google.cloud.deploy.v1.DeliveryPipelineNotificationEvent)
private static final com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent();
}
public static com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<DeliveryPipelineNotificationEvent> PARSER =
new com.google.protobuf.AbstractParser<DeliveryPipelineNotificationEvent>() {
@java.lang.Override
public DeliveryPipelineNotificationEvent parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<DeliveryPipelineNotificationEvent> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<DeliveryPipelineNotificationEvent> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.deploy.v1.DeliveryPipelineNotificationEvent getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,822 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/InstanceGroupManagersPatchPerInstanceConfigsReq.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
 *
 *
 * <pre>
 * InstanceGroupManagers.patchPerInstanceConfigs
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq}
 */
// NOTE(review): protoc-generated message class (see "DO NOT EDIT" file header).
// Comments below annotate the generated machinery; the code itself must stay in
// sync with the protobuf compiler's output and should not be hand-modified.
public final class InstanceGroupManagersPatchPerInstanceConfigsReq
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq)
    InstanceGroupManagersPatchPerInstanceConfigsReqOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use InstanceGroupManagersPatchPerInstanceConfigsReq.newBuilder() to construct.
  private InstanceGroupManagersPatchPerInstanceConfigsReq(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: the repeated field starts as an immutable empty list.
  private InstanceGroupManagersPatchPerInstanceConfigsReq() {
    perInstanceConfigs_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new InstanceGroupManagersPatchPerInstanceConfigsReq();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_InstanceGroupManagersPatchPerInstanceConfigsReq_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_InstanceGroupManagersPatchPerInstanceConfigsReq_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq.class,
            com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq.Builder
                .class);
  }

  // Wire field number for per_instance_configs, taken from compute.proto.
  public static final int PER_INSTANCE_CONFIGS_FIELD_NUMBER = 526265001;

  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_;

  /**
   *
   *
   * <pre>
   * The list of per-instance configurations to insert or patch on this managed instance group.
   * </pre>
   *
   * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> getPerInstanceConfigsList() {
    return perInstanceConfigs_;
  }

  /**
   *
   *
   * <pre>
   * The list of per-instance configurations to insert or patch on this managed instance group.
   * </pre>
   *
   * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
      getPerInstanceConfigsOrBuilderList() {
    return perInstanceConfigs_;
  }

  /**
   *
   *
   * <pre>
   * The list of per-instance configurations to insert or patch on this managed instance group.
   * </pre>
   *
   * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
   * </code>
   */
  @java.lang.Override
  public int getPerInstanceConfigsCount() {
    return perInstanceConfigs_.size();
  }

  /**
   *
   *
   * <pre>
   * The list of per-instance configurations to insert or patch on this managed instance group.
   * </pre>
   *
   * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) {
    return perInstanceConfigs_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The list of per-instance configurations to insert or patch on this managed instance group.
   * </pre>
   *
   * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder(
      int index) {
    return perInstanceConfigs_.get(index);
  }

  // Memoized initialization state: -1 = not yet computed, 0 = not initialized,
  // 1 = initialized. This message has no required fields, so it always ends up 1.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < perInstanceConfigs_.size(); i++) {
      output.writeMessage(526265001, perInstanceConfigs_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Serialized size is memoized; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < perInstanceConfigs_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              526265001, perInstanceConfigs_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq other =
        (com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq) obj;

    if (!getPerInstanceConfigsList().equals(other.getPerInstanceConfigsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard protobuf-generated hash: mix descriptor, then each set field by
    // its field number and value, using the generator's fixed primes.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getPerInstanceConfigsCount() > 0) {
      hash = (37 * hash) + PER_INSTANCE_CONFIGS_FIELD_NUMBER;
      hash = (53 * hash) + getPerInstanceConfigsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * InstanceGroupManagers.patchPerInstanceConfigs
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq)
      com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReqOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_InstanceGroupManagersPatchPerInstanceConfigsReq_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_InstanceGroupManagersPatchPerInstanceConfigsReq_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq.class,
              com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // The repeated field is tracked either as a plain list or via the
      // RepeatedFieldBuilderV3 (once builders have been handed out) — reset
      // whichever representation is active.
      if (perInstanceConfigsBuilder_ == null) {
        perInstanceConfigs_ = java.util.Collections.emptyList();
      } else {
        perInstanceConfigs_ = null;
        perInstanceConfigsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_InstanceGroupManagersPatchPerInstanceConfigsReq_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq build() {
      com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
        buildPartial() {
      com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq result =
          new com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers the repeated field into the result, freezing the plain-list
    // representation as unmodifiable so the built message is immutable.
    private void buildPartialRepeatedFields(
        com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq result) {
      if (perInstanceConfigsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          perInstanceConfigs_ = java.util.Collections.unmodifiableList(perInstanceConfigs_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.perInstanceConfigs_ = perInstanceConfigs_;
      } else {
        result.perInstanceConfigs_ = perInstanceConfigsBuilder_.build();
      }
    }

    // No singular fields on this message; emitted by the generator for uniformity.
    private void buildPartial0(
        com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq result) {
      int from_bitField0_ = bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq) {
        return mergeFrom(
            (com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq other) {
      if (other
          == com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
              .getDefaultInstance()) return this;
      if (perInstanceConfigsBuilder_ == null) {
        if (!other.perInstanceConfigs_.isEmpty()) {
          if (perInstanceConfigs_.isEmpty()) {
            // Our list is empty: share other's (immutable) list and mark it
            // not-owned by clearing the mutability bit.
            perInstanceConfigs_ = other.perInstanceConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensurePerInstanceConfigsIsMutable();
            perInstanceConfigs_.addAll(other.perInstanceConfigs_);
          }
          onChanged();
        }
      } else {
        if (!other.perInstanceConfigs_.isEmpty()) {
          if (perInstanceConfigsBuilder_.isEmpty()) {
            // Builder holds nothing: drop it, adopt other's list directly, and
            // recreate the field builder only if the runtime always uses them.
            perInstanceConfigsBuilder_.dispose();
            perInstanceConfigsBuilder_ = null;
            perInstanceConfigs_ = other.perInstanceConfigs_;
            bitField0_ = (bitField0_ & ~0x00000001);
            perInstanceConfigsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getPerInstanceConfigsFieldBuilder()
                    : null;
          } else {
            perInstanceConfigsBuilder_.addAllMessages(other.perInstanceConfigs_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case -84847286:
              {
                // Tag = (526265001 << 3) | 2 (length-delimited), which overflows
                // a signed 32-bit int to -84847286.
                com.google.cloud.compute.v1.PerInstanceConfig m =
                    input.readMessage(
                        com.google.cloud.compute.v1.PerInstanceConfig.parser(), extensionRegistry);
                if (perInstanceConfigsBuilder_ == null) {
                  ensurePerInstanceConfigsIsMutable();
                  perInstanceConfigs_.add(m);
                } else {
                  perInstanceConfigsBuilder_.addMessage(m);
                }
                break;
              } // case -84847286
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Bit 0x00000001 tracks whether perInstanceConfigs_ is a private mutable copy.
    private int bitField0_;

    private java.util.List<com.google.cloud.compute.v1.PerInstanceConfig> perInstanceConfigs_ =
        java.util.Collections.emptyList();

    // Copy-on-write: replace a shared/immutable list with a private ArrayList
    // before the first mutation, then set the ownership bit.
    private void ensurePerInstanceConfigsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        perInstanceConfigs_ =
            new java.util.ArrayList<com.google.cloud.compute.v1.PerInstanceConfig>(
                perInstanceConfigs_);
        bitField0_ |= 0x00000001;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.PerInstanceConfig,
            com.google.cloud.compute.v1.PerInstanceConfig.Builder,
            com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
        perInstanceConfigsBuilder_;

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig>
        getPerInstanceConfigsList() {
      if (perInstanceConfigsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(perInstanceConfigs_);
      } else {
        return perInstanceConfigsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public int getPerInstanceConfigsCount() {
      if (perInstanceConfigsBuilder_ == null) {
        return perInstanceConfigs_.size();
      } else {
        return perInstanceConfigsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public com.google.cloud.compute.v1.PerInstanceConfig getPerInstanceConfigs(int index) {
      if (perInstanceConfigsBuilder_ == null) {
        return perInstanceConfigs_.get(index);
      } else {
        return perInstanceConfigsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder setPerInstanceConfigs(
        int index, com.google.cloud.compute.v1.PerInstanceConfig value) {
      if (perInstanceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePerInstanceConfigsIsMutable();
        perInstanceConfigs_.set(index, value);
        onChanged();
      } else {
        perInstanceConfigsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder setPerInstanceConfigs(
        int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
      if (perInstanceConfigsBuilder_ == null) {
        ensurePerInstanceConfigsIsMutable();
        perInstanceConfigs_.set(index, builderForValue.build());
        onChanged();
      } else {
        perInstanceConfigsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder addPerInstanceConfigs(com.google.cloud.compute.v1.PerInstanceConfig value) {
      if (perInstanceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePerInstanceConfigsIsMutable();
        perInstanceConfigs_.add(value);
        onChanged();
      } else {
        perInstanceConfigsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder addPerInstanceConfigs(
        int index, com.google.cloud.compute.v1.PerInstanceConfig value) {
      if (perInstanceConfigsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensurePerInstanceConfigsIsMutable();
        perInstanceConfigs_.add(index, value);
        onChanged();
      } else {
        perInstanceConfigsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder addPerInstanceConfigs(
        com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
      if (perInstanceConfigsBuilder_ == null) {
        ensurePerInstanceConfigsIsMutable();
        perInstanceConfigs_.add(builderForValue.build());
        onChanged();
      } else {
        perInstanceConfigsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder addPerInstanceConfigs(
        int index, com.google.cloud.compute.v1.PerInstanceConfig.Builder builderForValue) {
      if (perInstanceConfigsBuilder_ == null) {
        ensurePerInstanceConfigsIsMutable();
        perInstanceConfigs_.add(index, builderForValue.build());
        onChanged();
      } else {
        perInstanceConfigsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder addAllPerInstanceConfigs(
        java.lang.Iterable<? extends com.google.cloud.compute.v1.PerInstanceConfig> values) {
      if (perInstanceConfigsBuilder_ == null) {
        ensurePerInstanceConfigsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, perInstanceConfigs_);
        onChanged();
      } else {
        perInstanceConfigsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder clearPerInstanceConfigs() {
      if (perInstanceConfigsBuilder_ == null) {
        perInstanceConfigs_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        perInstanceConfigsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public Builder removePerInstanceConfigs(int index) {
      if (perInstanceConfigsBuilder_ == null) {
        ensurePerInstanceConfigsIsMutable();
        perInstanceConfigs_.remove(index);
        onChanged();
      } else {
        perInstanceConfigsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public com.google.cloud.compute.v1.PerInstanceConfig.Builder getPerInstanceConfigsBuilder(
        int index) {
      return getPerInstanceConfigsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public com.google.cloud.compute.v1.PerInstanceConfigOrBuilder getPerInstanceConfigsOrBuilder(
        int index) {
      if (perInstanceConfigsBuilder_ == null) {
        return perInstanceConfigs_.get(index);
      } else {
        return perInstanceConfigsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public java.util.List<? extends com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
        getPerInstanceConfigsOrBuilderList() {
      if (perInstanceConfigsBuilder_ != null) {
        return perInstanceConfigsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(perInstanceConfigs_);
      }
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder() {
      return getPerInstanceConfigsFieldBuilder()
          .addBuilder(com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public com.google.cloud.compute.v1.PerInstanceConfig.Builder addPerInstanceConfigsBuilder(
        int index) {
      return getPerInstanceConfigsFieldBuilder()
          .addBuilder(index, com.google.cloud.compute.v1.PerInstanceConfig.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * The list of per-instance configurations to insert or patch on this managed instance group.
     * </pre>
     *
     * <code>repeated .google.cloud.compute.v1.PerInstanceConfig per_instance_configs = 526265001;
     * </code>
     */
    public java.util.List<com.google.cloud.compute.v1.PerInstanceConfig.Builder>
        getPerInstanceConfigsBuilderList() {
      return getPerInstanceConfigsFieldBuilder().getBuilderList();
    }

    // Lazily creates the RepeatedFieldBuilderV3; once created, it owns the
    // field contents and perInstanceConfigs_ is nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.compute.v1.PerInstanceConfig,
            com.google.cloud.compute.v1.PerInstanceConfig.Builder,
            com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>
        getPerInstanceConfigsFieldBuilder() {
      if (perInstanceConfigsBuilder_ == null) {
        perInstanceConfigsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.compute.v1.PerInstanceConfig,
                com.google.cloud.compute.v1.PerInstanceConfig.Builder,
                com.google.cloud.compute.v1.PerInstanceConfigOrBuilder>(
                perInstanceConfigs_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        perInstanceConfigs_ = null;
      }
      return perInstanceConfigsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq)
  private static final com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq();
  }

  public static com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<InstanceGroupManagersPatchPerInstanceConfigsReq>
      PARSER =
          new com.google.protobuf.AbstractParser<
              InstanceGroupManagersPatchPerInstanceConfigsReq>() {
            @java.lang.Override
            public InstanceGroupManagersPatchPerInstanceConfigsReq parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                // Attach whatever was parsed so far so callers can inspect it.
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };

  public static com.google.protobuf.Parser<InstanceGroupManagersPatchPerInstanceConfigsReq>
      parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<InstanceGroupManagersPatchPerInstanceConfigsReq>
      getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.InstanceGroupManagersPatchPerInstanceConfigsReq
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/sdk-platform-java | 36,063 | test/integration/goldens/pubsub/src/com/google/cloud/pubsub/v1/stub/GrpcSubscriberStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.pubsub.v1.stub;
import static com.google.cloud.pubsub.v1.SubscriptionAdminClient.ListSnapshotsPagedResponse;
import static com.google.cloud.pubsub.v1.SubscriptionAdminClient.ListSubscriptionsPagedResponse;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.grpc.GrpcCallSettings;
import com.google.api.gax.grpc.GrpcStubCallableFactory;
import com.google.api.gax.rpc.BidiStreamingCallable;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.stub.GrpcOperationsStub;
import com.google.protobuf.Empty;
import com.google.pubsub.v1.AcknowledgeRequest;
import com.google.pubsub.v1.CreateSnapshotRequest;
import com.google.pubsub.v1.DeleteSnapshotRequest;
import com.google.pubsub.v1.DeleteSubscriptionRequest;
import com.google.pubsub.v1.GetSnapshotRequest;
import com.google.pubsub.v1.GetSubscriptionRequest;
import com.google.pubsub.v1.ListSnapshotsRequest;
import com.google.pubsub.v1.ListSnapshotsResponse;
import com.google.pubsub.v1.ListSubscriptionsRequest;
import com.google.pubsub.v1.ListSubscriptionsResponse;
import com.google.pubsub.v1.ModifyAckDeadlineRequest;
import com.google.pubsub.v1.ModifyPushConfigRequest;
import com.google.pubsub.v1.PullRequest;
import com.google.pubsub.v1.PullResponse;
import com.google.pubsub.v1.SeekRequest;
import com.google.pubsub.v1.SeekResponse;
import com.google.pubsub.v1.Snapshot;
import com.google.pubsub.v1.StreamingPullRequest;
import com.google.pubsub.v1.StreamingPullResponse;
import com.google.pubsub.v1.Subscription;
import com.google.pubsub.v1.UpdateSnapshotRequest;
import com.google.pubsub.v1.UpdateSubscriptionRequest;
import io.grpc.MethodDescriptor;
import io.grpc.protobuf.ProtoUtils;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
 * gRPC stub implementation for the Subscriber service API.
 *
 * <p>This class is for advanced usage and reflects the underlying API directly.
 */
@Generated("by gapic-generator-java")
public class GrpcSubscriberStub extends SubscriberStub {
  // ---------------------------------------------------------------------------------------------
  // Static gRPC MethodDescriptors — one per RPC of the Subscriber service plus the standard
  // IAMPolicy mixin. Each descriptor pins the method type (unary / bidi-streaming), the
  // fully-qualified gRPC method name, and protobuf marshallers for request/response.
  // ---------------------------------------------------------------------------------------------
  private static final MethodDescriptor<Subscription, Subscription>
      createSubscriptionMethodDescriptor =
          MethodDescriptor.<Subscription, Subscription>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/CreateSubscription")
              .setRequestMarshaller(ProtoUtils.marshaller(Subscription.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Subscription.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetSubscriptionRequest, Subscription>
      getSubscriptionMethodDescriptor =
          MethodDescriptor.<GetSubscriptionRequest, Subscription>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/GetSubscription")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(GetSubscriptionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Subscription.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<UpdateSubscriptionRequest, Subscription>
      updateSubscriptionMethodDescriptor =
          MethodDescriptor.<UpdateSubscriptionRequest, Subscription>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/UpdateSubscription")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateSubscriptionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Subscription.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ListSubscriptionsRequest, ListSubscriptionsResponse>
      listSubscriptionsMethodDescriptor =
          MethodDescriptor.<ListSubscriptionsRequest, ListSubscriptionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/ListSubscriptions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListSubscriptionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListSubscriptionsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<DeleteSubscriptionRequest, Empty>
      deleteSubscriptionMethodDescriptor =
          MethodDescriptor.<DeleteSubscriptionRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/DeleteSubscription")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteSubscriptionRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ModifyAckDeadlineRequest, Empty>
      modifyAckDeadlineMethodDescriptor =
          MethodDescriptor.<ModifyAckDeadlineRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/ModifyAckDeadline")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ModifyAckDeadlineRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<AcknowledgeRequest, Empty> acknowledgeMethodDescriptor =
      MethodDescriptor.<AcknowledgeRequest, Empty>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.pubsub.v1.Subscriber/Acknowledge")
          .setRequestMarshaller(ProtoUtils.marshaller(AcknowledgeRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<PullRequest, PullResponse> pullMethodDescriptor =
      MethodDescriptor.<PullRequest, PullResponse>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.pubsub.v1.Subscriber/Pull")
          .setRequestMarshaller(ProtoUtils.marshaller(PullRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(PullResponse.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  // StreamingPull is the only non-unary RPC here: a long-lived bidirectional stream.
  private static final MethodDescriptor<StreamingPullRequest, StreamingPullResponse>
      streamingPullMethodDescriptor =
          MethodDescriptor.<StreamingPullRequest, StreamingPullResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.BIDI_STREAMING)
              .setFullMethodName("google.pubsub.v1.Subscriber/StreamingPull")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(StreamingPullRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(StreamingPullResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<ModifyPushConfigRequest, Empty>
      modifyPushConfigMethodDescriptor =
          MethodDescriptor.<ModifyPushConfigRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/ModifyPushConfig")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ModifyPushConfigRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<GetSnapshotRequest, Snapshot> getSnapshotMethodDescriptor =
      MethodDescriptor.<GetSnapshotRequest, Snapshot>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.pubsub.v1.Subscriber/GetSnapshot")
          .setRequestMarshaller(ProtoUtils.marshaller(GetSnapshotRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Snapshot.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<ListSnapshotsRequest, ListSnapshotsResponse>
      listSnapshotsMethodDescriptor =
          MethodDescriptor.<ListSnapshotsRequest, ListSnapshotsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/ListSnapshots")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(ListSnapshotsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(ListSnapshotsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<CreateSnapshotRequest, Snapshot>
      createSnapshotMethodDescriptor =
          MethodDescriptor.<CreateSnapshotRequest, Snapshot>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/CreateSnapshot")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(CreateSnapshotRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Snapshot.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<UpdateSnapshotRequest, Snapshot>
      updateSnapshotMethodDescriptor =
          MethodDescriptor.<UpdateSnapshotRequest, Snapshot>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/UpdateSnapshot")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(UpdateSnapshotRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Snapshot.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<DeleteSnapshotRequest, Empty>
      deleteSnapshotMethodDescriptor =
          MethodDescriptor.<DeleteSnapshotRequest, Empty>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.pubsub.v1.Subscriber/DeleteSnapshot")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(DeleteSnapshotRequest.getDefaultInstance()))
              .setResponseMarshaller(ProtoUtils.marshaller(Empty.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  private static final MethodDescriptor<SeekRequest, SeekResponse> seekMethodDescriptor =
      MethodDescriptor.<SeekRequest, SeekResponse>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.pubsub.v1.Subscriber/Seek")
          .setRequestMarshaller(ProtoUtils.marshaller(SeekRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(SeekResponse.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  // IAMPolicy mixin RPCs (note the google.iam.v1.IAMPolicy service name, not Subscriber).
  private static final MethodDescriptor<SetIamPolicyRequest, Policy> setIamPolicyMethodDescriptor =
      MethodDescriptor.<SetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.iam.v1.IAMPolicy/SetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(SetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<GetIamPolicyRequest, Policy> getIamPolicyMethodDescriptor =
      MethodDescriptor.<GetIamPolicyRequest, Policy>newBuilder()
          .setType(MethodDescriptor.MethodType.UNARY)
          .setFullMethodName("google.iam.v1.IAMPolicy/GetIamPolicy")
          .setRequestMarshaller(ProtoUtils.marshaller(GetIamPolicyRequest.getDefaultInstance()))
          .setResponseMarshaller(ProtoUtils.marshaller(Policy.getDefaultInstance()))
          .setSampledToLocalTracing(true)
          .build();
  private static final MethodDescriptor<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsMethodDescriptor =
          MethodDescriptor.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
              .setType(MethodDescriptor.MethodType.UNARY)
              .setFullMethodName("google.iam.v1.IAMPolicy/TestIamPermissions")
              .setRequestMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsRequest.getDefaultInstance()))
              .setResponseMarshaller(
                  ProtoUtils.marshaller(TestIamPermissionsResponse.getDefaultInstance()))
              .setSampledToLocalTracing(true)
              .build();
  // ---------------------------------------------------------------------------------------------
  // Callables built from the descriptors above in the constructor. Paged variants wrap the raw
  // list callables for client-side pagination.
  // ---------------------------------------------------------------------------------------------
  private final UnaryCallable<Subscription, Subscription> createSubscriptionCallable;
  private final UnaryCallable<GetSubscriptionRequest, Subscription> getSubscriptionCallable;
  private final UnaryCallable<UpdateSubscriptionRequest, Subscription> updateSubscriptionCallable;
  private final UnaryCallable<ListSubscriptionsRequest, ListSubscriptionsResponse>
      listSubscriptionsCallable;
  private final UnaryCallable<ListSubscriptionsRequest, ListSubscriptionsPagedResponse>
      listSubscriptionsPagedCallable;
  private final UnaryCallable<DeleteSubscriptionRequest, Empty> deleteSubscriptionCallable;
  private final UnaryCallable<ModifyAckDeadlineRequest, Empty> modifyAckDeadlineCallable;
  private final UnaryCallable<AcknowledgeRequest, Empty> acknowledgeCallable;
  private final UnaryCallable<PullRequest, PullResponse> pullCallable;
  private final BidiStreamingCallable<StreamingPullRequest, StreamingPullResponse>
      streamingPullCallable;
  private final UnaryCallable<ModifyPushConfigRequest, Empty> modifyPushConfigCallable;
  private final UnaryCallable<GetSnapshotRequest, Snapshot> getSnapshotCallable;
  private final UnaryCallable<ListSnapshotsRequest, ListSnapshotsResponse> listSnapshotsCallable;
  private final UnaryCallable<ListSnapshotsRequest, ListSnapshotsPagedResponse>
      listSnapshotsPagedCallable;
  private final UnaryCallable<CreateSnapshotRequest, Snapshot> createSnapshotCallable;
  private final UnaryCallable<UpdateSnapshotRequest, Snapshot> updateSnapshotCallable;
  private final UnaryCallable<DeleteSnapshotRequest, Empty> deleteSnapshotCallable;
  private final UnaryCallable<SeekRequest, SeekResponse> seekCallable;
  private final UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable;
  private final UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable;
  private final UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable;
  // Aggregates everything that needs shutting down when this stub is closed.
  private final BackgroundResource backgroundResources;
  private final GrpcOperationsStub operationsStub;
  private final GrpcStubCallableFactory callableFactory;
  /** Creates a stub from the given settings, with a {@link ClientContext} derived from them. */
  public static final GrpcSubscriberStub create(SubscriberStubSettings settings)
      throws IOException {
    return new GrpcSubscriberStub(settings, ClientContext.create(settings));
  }
  /** Creates a stub using default settings and the given, pre-built {@link ClientContext}. */
  public static final GrpcSubscriberStub create(ClientContext clientContext) throws IOException {
    return new GrpcSubscriberStub(SubscriberStubSettings.newBuilder().build(), clientContext);
  }
  /** Creates a stub using default settings, the given context, and a custom callable factory. */
  public static final GrpcSubscriberStub create(
      ClientContext clientContext, GrpcStubCallableFactory callableFactory) throws IOException {
    return new GrpcSubscriberStub(
        SubscriberStubSettings.newBuilder().build(), clientContext, callableFactory);
  }
  /**
   * Constructs an instance of GrpcSubscriberStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcSubscriberStub(SubscriberStubSettings settings, ClientContext clientContext)
      throws IOException {
    this(settings, clientContext, new GrpcSubscriberCallableFactory());
  }
  /**
   * Constructs an instance of GrpcSubscriberStub, using the given settings. This is protected so
   * that it is easy to make a subclass, but otherwise, the static factory methods should be
   * preferred.
   */
  protected GrpcSubscriberStub(
      SubscriberStubSettings settings,
      ClientContext clientContext,
      GrpcStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    this.operationsStub = GrpcOperationsStub.create(clientContext, callableFactory);
    // Per-RPC transport settings. Each params extractor copies the request field that carries the
    // routing key (e.g. "subscription", "project", "resource") into request params, which gRPC
    // uses for request routing headers.
    GrpcCallSettings<Subscription, Subscription> createSubscriptionTransportSettings =
        GrpcCallSettings.<Subscription, Subscription>newBuilder()
            .setMethodDescriptor(createSubscriptionMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetSubscriptionRequest, Subscription> getSubscriptionTransportSettings =
        GrpcCallSettings.<GetSubscriptionRequest, Subscription>newBuilder()
            .setMethodDescriptor(getSubscriptionMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("subscription", String.valueOf(request.getSubscription()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateSubscriptionRequest, Subscription> updateSubscriptionTransportSettings =
        GrpcCallSettings.<UpdateSubscriptionRequest, Subscription>newBuilder()
            .setMethodDescriptor(updateSubscriptionMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(
                      "subscription.name", String.valueOf(request.getSubscription().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListSubscriptionsRequest, ListSubscriptionsResponse>
        listSubscriptionsTransportSettings =
            GrpcCallSettings.<ListSubscriptionsRequest, ListSubscriptionsResponse>newBuilder()
                .setMethodDescriptor(listSubscriptionsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("project", String.valueOf(request.getProject()));
                      return builder.build();
                    })
                .build();
    GrpcCallSettings<DeleteSubscriptionRequest, Empty> deleteSubscriptionTransportSettings =
        GrpcCallSettings.<DeleteSubscriptionRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteSubscriptionMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("subscription", String.valueOf(request.getSubscription()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ModifyAckDeadlineRequest, Empty> modifyAckDeadlineTransportSettings =
        GrpcCallSettings.<ModifyAckDeadlineRequest, Empty>newBuilder()
            .setMethodDescriptor(modifyAckDeadlineMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("subscription", String.valueOf(request.getSubscription()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<AcknowledgeRequest, Empty> acknowledgeTransportSettings =
        GrpcCallSettings.<AcknowledgeRequest, Empty>newBuilder()
            .setMethodDescriptor(acknowledgeMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("subscription", String.valueOf(request.getSubscription()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<PullRequest, PullResponse> pullTransportSettings =
        GrpcCallSettings.<PullRequest, PullResponse>newBuilder()
            .setMethodDescriptor(pullMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("subscription", String.valueOf(request.getSubscription()));
                  return builder.build();
                })
            .build();
    // Streaming call: no params extractor — routing headers cannot be derived per-request on a
    // long-lived bidi stream.
    GrpcCallSettings<StreamingPullRequest, StreamingPullResponse> streamingPullTransportSettings =
        GrpcCallSettings.<StreamingPullRequest, StreamingPullResponse>newBuilder()
            .setMethodDescriptor(streamingPullMethodDescriptor)
            .build();
    GrpcCallSettings<ModifyPushConfigRequest, Empty> modifyPushConfigTransportSettings =
        GrpcCallSettings.<ModifyPushConfigRequest, Empty>newBuilder()
            .setMethodDescriptor(modifyPushConfigMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("subscription", String.valueOf(request.getSubscription()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetSnapshotRequest, Snapshot> getSnapshotTransportSettings =
        GrpcCallSettings.<GetSnapshotRequest, Snapshot>newBuilder()
            .setMethodDescriptor(getSnapshotMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("snapshot", String.valueOf(request.getSnapshot()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<ListSnapshotsRequest, ListSnapshotsResponse> listSnapshotsTransportSettings =
        GrpcCallSettings.<ListSnapshotsRequest, ListSnapshotsResponse>newBuilder()
            .setMethodDescriptor(listSnapshotsMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("project", String.valueOf(request.getProject()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<CreateSnapshotRequest, Snapshot> createSnapshotTransportSettings =
        GrpcCallSettings.<CreateSnapshotRequest, Snapshot>newBuilder()
            .setMethodDescriptor(createSnapshotMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<UpdateSnapshotRequest, Snapshot> updateSnapshotTransportSettings =
        GrpcCallSettings.<UpdateSnapshotRequest, Snapshot>newBuilder()
            .setMethodDescriptor(updateSnapshotMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("snapshot.name", String.valueOf(request.getSnapshot().getName()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<DeleteSnapshotRequest, Empty> deleteSnapshotTransportSettings =
        GrpcCallSettings.<DeleteSnapshotRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteSnapshotMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("snapshot", String.valueOf(request.getSnapshot()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<SeekRequest, SeekResponse> seekTransportSettings =
        GrpcCallSettings.<SeekRequest, SeekResponse>newBuilder()
            .setMethodDescriptor(seekMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("subscription", String.valueOf(request.getSubscription()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<SetIamPolicyRequest, Policy> setIamPolicyTransportSettings =
        GrpcCallSettings.<SetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(setIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<GetIamPolicyRequest, Policy> getIamPolicyTransportSettings =
        GrpcCallSettings.<GetIamPolicyRequest, Policy>newBuilder()
            .setMethodDescriptor(getIamPolicyMethodDescriptor)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("resource", String.valueOf(request.getResource()));
                  return builder.build();
                })
            .build();
    GrpcCallSettings<TestIamPermissionsRequest, TestIamPermissionsResponse>
        testIamPermissionsTransportSettings =
            GrpcCallSettings.<TestIamPermissionsRequest, TestIamPermissionsResponse>newBuilder()
                .setMethodDescriptor(testIamPermissionsMethodDescriptor)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("resource", String.valueOf(request.getResource()));
                      return builder.build();
                    })
                .build();
    // Wire each transport setting to its retry/timeout configuration from `settings` to build the
    // final callables exposed by this stub.
    this.createSubscriptionCallable =
        callableFactory.createUnaryCallable(
            createSubscriptionTransportSettings,
            settings.createSubscriptionSettings(),
            clientContext);
    this.getSubscriptionCallable =
        callableFactory.createUnaryCallable(
            getSubscriptionTransportSettings, settings.getSubscriptionSettings(), clientContext);
    this.updateSubscriptionCallable =
        callableFactory.createUnaryCallable(
            updateSubscriptionTransportSettings,
            settings.updateSubscriptionSettings(),
            clientContext);
    this.listSubscriptionsCallable =
        callableFactory.createUnaryCallable(
            listSubscriptionsTransportSettings,
            settings.listSubscriptionsSettings(),
            clientContext);
    this.listSubscriptionsPagedCallable =
        callableFactory.createPagedCallable(
            listSubscriptionsTransportSettings,
            settings.listSubscriptionsSettings(),
            clientContext);
    this.deleteSubscriptionCallable =
        callableFactory.createUnaryCallable(
            deleteSubscriptionTransportSettings,
            settings.deleteSubscriptionSettings(),
            clientContext);
    this.modifyAckDeadlineCallable =
        callableFactory.createUnaryCallable(
            modifyAckDeadlineTransportSettings,
            settings.modifyAckDeadlineSettings(),
            clientContext);
    this.acknowledgeCallable =
        callableFactory.createUnaryCallable(
            acknowledgeTransportSettings, settings.acknowledgeSettings(), clientContext);
    this.pullCallable =
        callableFactory.createUnaryCallable(
            pullTransportSettings, settings.pullSettings(), clientContext);
    this.streamingPullCallable =
        callableFactory.createBidiStreamingCallable(
            streamingPullTransportSettings, settings.streamingPullSettings(), clientContext);
    this.modifyPushConfigCallable =
        callableFactory.createUnaryCallable(
            modifyPushConfigTransportSettings, settings.modifyPushConfigSettings(), clientContext);
    this.getSnapshotCallable =
        callableFactory.createUnaryCallable(
            getSnapshotTransportSettings, settings.getSnapshotSettings(), clientContext);
    this.listSnapshotsCallable =
        callableFactory.createUnaryCallable(
            listSnapshotsTransportSettings, settings.listSnapshotsSettings(), clientContext);
    this.listSnapshotsPagedCallable =
        callableFactory.createPagedCallable(
            listSnapshotsTransportSettings, settings.listSnapshotsSettings(), clientContext);
    this.createSnapshotCallable =
        callableFactory.createUnaryCallable(
            createSnapshotTransportSettings, settings.createSnapshotSettings(), clientContext);
    this.updateSnapshotCallable =
        callableFactory.createUnaryCallable(
            updateSnapshotTransportSettings, settings.updateSnapshotSettings(), clientContext);
    this.deleteSnapshotCallable =
        callableFactory.createUnaryCallable(
            deleteSnapshotTransportSettings, settings.deleteSnapshotSettings(), clientContext);
    this.seekCallable =
        callableFactory.createUnaryCallable(
            seekTransportSettings, settings.seekSettings(), clientContext);
    this.setIamPolicyCallable =
        callableFactory.createUnaryCallable(
            setIamPolicyTransportSettings, settings.setIamPolicySettings(), clientContext);
    this.getIamPolicyCallable =
        callableFactory.createUnaryCallable(
            getIamPolicyTransportSettings, settings.getIamPolicySettings(), clientContext);
    this.testIamPermissionsCallable =
        callableFactory.createUnaryCallable(
            testIamPermissionsTransportSettings,
            settings.testIamPermissionsSettings(),
            clientContext);
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  public GrpcOperationsStub getOperationsStub() {
    return operationsStub;
  }
  // ---------------------------------------------------------------------------------------------
  // Accessors for the callables built in the constructor; these are the SubscriberStub API.
  // ---------------------------------------------------------------------------------------------
  @Override
  public UnaryCallable<Subscription, Subscription> createSubscriptionCallable() {
    return createSubscriptionCallable;
  }
  @Override
  public UnaryCallable<GetSubscriptionRequest, Subscription> getSubscriptionCallable() {
    return getSubscriptionCallable;
  }
  @Override
  public UnaryCallable<UpdateSubscriptionRequest, Subscription> updateSubscriptionCallable() {
    return updateSubscriptionCallable;
  }
  @Override
  public UnaryCallable<ListSubscriptionsRequest, ListSubscriptionsResponse>
      listSubscriptionsCallable() {
    return listSubscriptionsCallable;
  }
  @Override
  public UnaryCallable<ListSubscriptionsRequest, ListSubscriptionsPagedResponse>
      listSubscriptionsPagedCallable() {
    return listSubscriptionsPagedCallable;
  }
  @Override
  public UnaryCallable<DeleteSubscriptionRequest, Empty> deleteSubscriptionCallable() {
    return deleteSubscriptionCallable;
  }
  @Override
  public UnaryCallable<ModifyAckDeadlineRequest, Empty> modifyAckDeadlineCallable() {
    return modifyAckDeadlineCallable;
  }
  @Override
  public UnaryCallable<AcknowledgeRequest, Empty> acknowledgeCallable() {
    return acknowledgeCallable;
  }
  @Override
  public UnaryCallable<PullRequest, PullResponse> pullCallable() {
    return pullCallable;
  }
  @Override
  public BidiStreamingCallable<StreamingPullRequest, StreamingPullResponse>
      streamingPullCallable() {
    return streamingPullCallable;
  }
  @Override
  public UnaryCallable<ModifyPushConfigRequest, Empty> modifyPushConfigCallable() {
    return modifyPushConfigCallable;
  }
  @Override
  public UnaryCallable<GetSnapshotRequest, Snapshot> getSnapshotCallable() {
    return getSnapshotCallable;
  }
  @Override
  public UnaryCallable<ListSnapshotsRequest, ListSnapshotsResponse> listSnapshotsCallable() {
    return listSnapshotsCallable;
  }
  @Override
  public UnaryCallable<ListSnapshotsRequest, ListSnapshotsPagedResponse>
      listSnapshotsPagedCallable() {
    return listSnapshotsPagedCallable;
  }
  @Override
  public UnaryCallable<CreateSnapshotRequest, Snapshot> createSnapshotCallable() {
    return createSnapshotCallable;
  }
  @Override
  public UnaryCallable<UpdateSnapshotRequest, Snapshot> updateSnapshotCallable() {
    return updateSnapshotCallable;
  }
  @Override
  public UnaryCallable<DeleteSnapshotRequest, Empty> deleteSnapshotCallable() {
    return deleteSnapshotCallable;
  }
  @Override
  public UnaryCallable<SeekRequest, SeekResponse> seekCallable() {
    return seekCallable;
  }
  @Override
  public UnaryCallable<SetIamPolicyRequest, Policy> setIamPolicyCallable() {
    return setIamPolicyCallable;
  }
  @Override
  public UnaryCallable<GetIamPolicyRequest, Policy> getIamPolicyCallable() {
    return getIamPolicyCallable;
  }
  @Override
  public UnaryCallable<TestIamPermissionsRequest, TestIamPermissionsResponse>
      testIamPermissionsCallable() {
    return testIamPermissionsCallable;
  }
  // Lifecycle: delegates to the aggregated background resources (channels, executors, ...).
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
// ===== NOTE(review): file boundary — the content below belongs to a different source file:
// apache/iotdb: iotdb-core/confignode/src/main/java/org/apache/iotdb/confignode/persistence/pipe/PipeTaskInfo.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.confignode.persistence.pipe;
import org.apache.iotdb.common.rpc.thrift.TSStatus;
import org.apache.iotdb.commons.consensus.index.impl.MinimumProgressIndex;
import org.apache.iotdb.commons.exception.pipe.PipeRuntimeCriticalException;
import org.apache.iotdb.commons.exception.pipe.PipeRuntimeException;
import org.apache.iotdb.commons.pipe.agent.plugin.builtin.BuiltinPipePlugin;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeMeta;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeMetaKeeper;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeRuntimeMeta;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeStaticMeta;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeStatus;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeTaskMeta;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeTemporaryMeta;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeTemporaryMetaInCoordinator;
import org.apache.iotdb.commons.pipe.agent.task.meta.PipeType;
import org.apache.iotdb.commons.pipe.config.PipeConfig;
import org.apache.iotdb.commons.pipe.config.constant.PipeProcessorConstant;
import org.apache.iotdb.commons.pipe.config.constant.PipeSinkConstant;
import org.apache.iotdb.commons.pipe.config.constant.PipeSourceConstant;
import org.apache.iotdb.commons.snapshot.SnapshotProcessor;
import org.apache.iotdb.confignode.consensus.request.ConfigPhysicalPlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.runtime.PipeHandleLeaderChangePlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.runtime.PipeHandleMetaChangePlan;
import org.apache.iotdb.confignode.consensus.request.write.pipe.task.AlterPipePlanV2;
import org.apache.iotdb.confignode.consensus.request.write.pipe.task.CreatePipePlanV2;
import org.apache.iotdb.confignode.consensus.request.write.pipe.task.DropPipePlanV2;
import org.apache.iotdb.confignode.consensus.request.write.pipe.task.OperateMultiplePipesPlanV2;
import org.apache.iotdb.confignode.consensus.request.write.pipe.task.SetPipeStatusPlanV2;
import org.apache.iotdb.confignode.consensus.response.pipe.task.PipeTableResp;
import org.apache.iotdb.confignode.manager.pipe.resource.PipeConfigNodeResourceManager;
import org.apache.iotdb.confignode.procedure.impl.pipe.runtime.PipeHandleMetaChangeProcedure;
import org.apache.iotdb.confignode.rpc.thrift.TAlterPipeReq;
import org.apache.iotdb.confignode.rpc.thrift.TCreatePipeReq;
import org.apache.iotdb.confignode.service.ConfigNode;
import org.apache.iotdb.consensus.common.DataSet;
import org.apache.iotdb.mpp.rpc.thrift.TPushPipeMetaResp;
import org.apache.iotdb.pipe.api.customizer.parameter.PipeParameters;
import org.apache.iotdb.pipe.api.exception.PipeException;
import org.apache.iotdb.rpc.TSStatusCode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import static org.apache.iotdb.commons.pipe.agent.plugin.builtin.BuiltinPipePlugin.IOTDB_THRIFT_CONNECTOR;
import static org.apache.iotdb.commons.pipe.config.constant.PipeRPCMessageConstant.PIPE_ALREADY_EXIST_MSG;
import static org.apache.iotdb.commons.pipe.config.constant.PipeRPCMessageConstant.PIPE_NOT_EXIST_MSG;
public class PipeTaskInfo implements SnapshotProcessor {
  // Shared logger for pipe-task bookkeeping on the config node.
  private static final Logger LOGGER = LoggerFactory.getLogger(PipeTaskInfo.class);

  // File name used when persisting / restoring the pipe meta snapshot.
  private static final String SNAPSHOT_FILE_NAME = "pipe_task_info.bin";

  // Holds every pipe's meta and owns the read/write lock that guards it.
  private final PipeMetaKeeper pipeMetaKeeper;

  // Pure in-memory object, not involved in snapshot serialization and deserialization.
  private final PipeTaskInfoVersion pipeTaskInfoVersion;

  /** Creates an empty {@code PipeTaskInfo} with a fresh meta keeper and version counter. */
  public PipeTaskInfo() {
    this.pipeMetaKeeper = new PipeMetaKeeper();
    this.pipeTaskInfoVersion = new PipeTaskInfoVersion();
  }
  /////////////////////////////// Lock ///////////////////////////////

  // Delegates to the meta keeper's shared read lock.
  private void acquireReadLock() {
    pipeMetaKeeper.acquireReadLock();
  }

  private void releaseReadLock() {
    pipeMetaKeeper.releaseReadLock();
  }

  // Acquires the exclusive write lock and bumps the version counter, since every
  // write-locked section may mutate pipe metadata.
  private void acquireWriteLock() {
    pipeMetaKeeper.acquireWriteLock();
    // We use the number of times obtaining the write lock of PipeMetaKeeper as the version number
    // of PipeTaskInfo.
    pipeTaskInfoVersion.increaseLatestVersion();
  }

  private void releaseWriteLock() {
    pipeMetaKeeper.releaseWriteLock();
  }
  /////////////////////////////// Version ///////////////////////////////

  /**
   * Tracks how many times the write lock has been taken (the "version") and remembers the state
   * observed at the last meta sync, so a sync round can be skipped when nothing has changed.
   *
   * <p>Mutations happen while the enclosing instance holds the meta keeper's write lock.
   */
  private class PipeTaskInfoVersion {

    // Monotonically increasing counter; incremented on every write-lock acquisition.
    private final AtomicLong latestVersion;
    // Version value captured at the time of the last sync.
    private long lastSyncedVersion;
    // Whether the keeper held no pipes when the last sync happened.
    private boolean isLastSyncedPipeTaskInfoEmpty;

    public PipeTaskInfoVersion() {
      this.latestVersion = new AtomicLong(0);
      this.lastSyncedVersion = 0;
      this.isLastSyncedPipeTaskInfoEmpty = false;
    }

    public void increaseLatestVersion() {
      latestVersion.incrementAndGet();
    }

    // Snapshot the current version and emptiness for the skip check below.
    public void updateLastSyncedVersion() {
      lastSyncedVersion = latestVersion.get();
      isLastSyncedPipeTaskInfoEmpty = pipeMetaKeeper.isEmpty();
    }

    // The next sync can be skipped only if the keeper was empty at the last sync, is still
    // empty now, and no write lock has been taken in between.
    public boolean canSkipNextSync() {
      return isLastSyncedPipeTaskInfoEmpty
          && pipeMetaKeeper.isEmpty()
          && lastSyncedVersion == latestVersion.get();
    }
  }
  /** Caller should ensure that the method is called in the lock {@link #acquireWriteLock}. */
  public void updateLastSyncedVersion() {
    pipeTaskInfoVersion.updateLastSyncedVersion();
  }

  /** Returns {@code true} if the next meta sync round can safely be skipped. */
  public boolean canSkipNextSync() {
    return pipeTaskInfoVersion.canSkipNextSync();
  }
  /////////////////////////////// Validator ///////////////////////////////

  /**
   * Validates a create-pipe request against the currently registered pipes.
   *
   * @param createPipeRequest the request to validate
   * @return {@code true} if the pipe can be created; {@code false} if it already exists and the
   *     request carries an IF NOT EXISTS condition
   * @throws PipeException if the pipe already exists and IF NOT EXISTS is absent
   */
  public boolean checkBeforeCreatePipe(final TCreatePipeReq createPipeRequest)
      throws PipeException {
    acquireReadLock();
    try {
      return checkBeforeCreatePipeInternal(createPipeRequest);
    } finally {
      releaseReadLock();
    }
  }

  private boolean checkBeforeCreatePipeInternal(final TCreatePipeReq createPipeRequest)
      throws PipeException {
    if (!isPipeExisted(createPipeRequest.getPipeName())) {
      return true;
    }

    // Pipe already exists: IF NOT EXISTS turns the creation into a silent no-op ...
    if (createPipeRequest.isSetIfNotExistsCondition()
        && createPipeRequest.isIfNotExistsCondition()) {
      return false;
    }

    // ... otherwise it is an error.
    final String exceptionMessage =
        String.format(
            "Failed to create pipe %s, %s",
            createPipeRequest.getPipeName(), PIPE_ALREADY_EXIST_MSG);
    LOGGER.warn(exceptionMessage);
    throw new PipeException(exceptionMessage);
  }
  /**
   * Validates an alter-pipe request and, for each plugin section in "modify" mode, merges the
   * pipe's current attributes into the request in place, so the downstream procedure always
   * receives complete attribute sets.
   *
   * @param alterPipeRequest the request; its extractor/processor/connector attribute maps may be
   *     rewritten in place
   * @throws PipeException if the target pipe does not exist
   */
  public void checkAndUpdateRequestBeforeAlterPipe(final TAlterPipeReq alterPipeRequest)
      throws PipeException {
    acquireReadLock();
    try {
      checkAndUpdateRequestBeforeAlterPipeInternal(alterPipeRequest);
    } finally {
      releaseReadLock();
    }
  }

  private void checkAndUpdateRequestBeforeAlterPipeInternal(final TAlterPipeReq alterPipeRequest)
      throws PipeException {
    if (!isPipeExisted(alterPipeRequest.getPipeName())) {
      final String exceptionMessage =
          String.format(
              "Failed to alter pipe %s, %s", alterPipeRequest.getPipeName(), PIPE_NOT_EXIST_MSG);
      LOGGER.warn(exceptionMessage);
      throw new PipeException(exceptionMessage);
    }

    final PipeStaticMeta pipeStaticMetaFromCoordinator =
        getPipeMetaByPipeName(alterPipeRequest.getPipeName()).getStaticMeta();
    // deep copy current pipe static meta (the attribute maps are copied so the merge below
    // cannot mutate the stored meta)
    final PipeStaticMeta copiedPipeStaticMetaFromCoordinator =
        new PipeStaticMeta(
            pipeStaticMetaFromCoordinator.getPipeName(),
            pipeStaticMetaFromCoordinator.getCreationTime(),
            new HashMap<>(pipeStaticMetaFromCoordinator.getSourceParameters().getAttribute()),
            new HashMap<>(pipeStaticMetaFromCoordinator.getProcessorParameters().getAttribute()),
            new HashMap<>(pipeStaticMetaFromCoordinator.getSinkParameters().getAttribute()));

    // 1. In modify mode, based on the passed attributes:
    //    1.1. if they are empty, the original attributes are filled directly.
    //    1.2. Otherwise, corresponding updates on original attributes are performed.
    // 2. In replace mode, do nothing here.
    if (!alterPipeRequest.isReplaceAllExtractorAttributes) { // modify mode
      if (alterPipeRequest.getExtractorAttributes().isEmpty()) {
        alterPipeRequest.setExtractorAttributes(
            copiedPipeStaticMetaFromCoordinator.getSourceParameters().getAttribute());
      } else {
        alterPipeRequest.setExtractorAttributes(
            copiedPipeStaticMetaFromCoordinator
                .getSourceParameters()
                .addOrReplaceEquivalentAttributes(
                    new PipeParameters(alterPipeRequest.getExtractorAttributes()))
                .getAttribute());
      }
    }

    // Same merge rule for the processor section.
    if (!alterPipeRequest.isReplaceAllProcessorAttributes) { // modify mode
      if (alterPipeRequest.getProcessorAttributes().isEmpty()) {
        alterPipeRequest.setProcessorAttributes(
            copiedPipeStaticMetaFromCoordinator.getProcessorParameters().getAttribute());
      } else {
        alterPipeRequest.setProcessorAttributes(
            copiedPipeStaticMetaFromCoordinator
                .getProcessorParameters()
                .addOrReplaceEquivalentAttributes(
                    new PipeParameters(alterPipeRequest.getProcessorAttributes()))
                .getAttribute());
      }
    }

    // Same merge rule for the connector (sink) section.
    if (!alterPipeRequest.isReplaceAllConnectorAttributes) { // modify mode
      if (alterPipeRequest.getConnectorAttributes().isEmpty()) {
        alterPipeRequest.setConnectorAttributes(
            copiedPipeStaticMetaFromCoordinator.getSinkParameters().getAttribute());
      } else {
        alterPipeRequest.setConnectorAttributes(
            copiedPipeStaticMetaFromCoordinator
                .getSinkParameters()
                .addOrReplaceEquivalentAttributes(
                    new PipeParameters(alterPipeRequest.getConnectorAttributes()))
                .getAttribute());
      }
    }
  }
  /**
   * Checks that the pipe exists and has not been dropped before it is started.
   *
   * @throws PipeException if the pipe does not exist or is already dropped
   */
  public void checkBeforeStartPipe(final String pipeName) throws PipeException {
    acquireReadLock();
    try {
      checkBeforeStartPipeInternal(pipeName);
    } finally {
      releaseReadLock();
    }
  }

  private void checkBeforeStartPipeInternal(final String pipeName) throws PipeException {
    if (!isPipeExisted(pipeName)) {
      final String exceptionMessage =
          String.format("Failed to start pipe %s, %s", pipeName, PIPE_NOT_EXIST_MSG);
      LOGGER.warn(exceptionMessage);
      throw new PipeException(exceptionMessage);
    }

    final PipeStatus pipeStatus = getPipeStatus(pipeName);
    if (pipeStatus == PipeStatus.DROPPED) {
      final String exceptionMessage =
          String.format("Failed to start pipe %s, the pipe is already dropped", pipeName);
      LOGGER.warn(exceptionMessage);
      throw new PipeException(exceptionMessage);
    }
  }

  /**
   * Checks that the pipe exists and has not been dropped before it is stopped.
   *
   * @throws PipeException if the pipe does not exist or is already dropped
   */
  public void checkBeforeStopPipe(final String pipeName) throws PipeException {
    acquireReadLock();
    try {
      checkBeforeStopPipeInternal(pipeName);
    } finally {
      releaseReadLock();
    }
  }

  private void checkBeforeStopPipeInternal(final String pipeName) throws PipeException {
    if (!isPipeExisted(pipeName)) {
      final String exceptionMessage =
          String.format("Failed to stop pipe %s, %s", pipeName, PIPE_NOT_EXIST_MSG);
      LOGGER.warn(exceptionMessage);
      throw new PipeException(exceptionMessage);
    }

    final PipeStatus pipeStatus = getPipeStatus(pipeName);
    if (pipeStatus == PipeStatus.DROPPED) {
      final String exceptionMessage =
          String.format("Failed to stop pipe %s, the pipe is already dropped", pipeName);
      LOGGER.warn(exceptionMessage);
      throw new PipeException(exceptionMessage);
    }
  }
  /** Pre-drop validation hook; intentionally never rejects (see internal comment). */
  public void checkBeforeDropPipe(final String pipeName) {
    acquireReadLock();
    try {
      checkBeforeDropPipeInternal(pipeName);
    } finally {
      releaseReadLock();
    }
  }

  private void checkBeforeDropPipeInternal(final String pipeName) {
    if (LOGGER.isDebugEnabled()) {
      LOGGER.debug(
          "Check before drop pipe {}, pipe exists: {}.", pipeName, isPipeExisted(pipeName));
    }
    // No matter whether the pipe exists, we allow the drop operation executed on all nodes to
    // ensure the consistency.
    // DO NOTHING HERE!
  }
  /** Returns whether a pipe with the given name is currently registered. */
  public boolean isPipeExisted(final String pipeName) {
    acquireReadLock();
    try {
      return pipeMetaKeeper.containsPipeMeta(pipeName);
    } finally {
      releaseReadLock();
    }
  }

  /** Returns whether a pipe with the given name and model flavor is currently registered. */
  public boolean isPipeExisted(final String pipeName, final boolean isTableModel) {
    acquireReadLock();
    try {
      return pipeMetaKeeper.containsPipeMeta(pipeName, isTableModel);
    } finally {
      releaseReadLock();
    }
  }

  // NOTE(review): assumes the pipe exists — getPipeMeta's behavior for an unknown name is not
  // visible here; callers guard with containsPipeMeta / isPipeExisted first. Confirm.
  private PipeStatus getPipeStatus(final String pipeName) {
    acquireReadLock();
    try {
      return pipeMetaKeeper.getPipeMeta(pipeName).getRuntimeMeta().getStatus().get();
    } finally {
      releaseReadLock();
    }
  }
  /** Returns {@code true} if the pipe exists and its current status is RUNNING. */
  public boolean isPipeRunning(final String pipeName) {
    acquireReadLock();
    try {
      // containsPipeMeta guards getPipeStatus against unknown names.
      return pipeMetaKeeper.containsPipeMeta(pipeName)
          && PipeStatus.RUNNING.equals(getPipeStatus(pipeName));
    } finally {
      releaseReadLock();
    }
  }

  /**
   * Returns {@code true} if the pipe exists, is STOPPED, and was stopped by a user rather than by
   * a runtime exception.
   */
  public boolean isPipeStoppedByUser(final String pipeName) {
    acquireReadLock();
    try {
      return pipeMetaKeeper.containsPipeMeta(pipeName)
          && PipeStatus.STOPPED.equals(getPipeStatus(pipeName))
          && !isStoppedByRuntimeException(pipeName);
    } finally {
      releaseReadLock();
    }
  }
public void validatePipePluginUsageByPipe(String pluginName) {
acquireReadLock();
try {
validatePipePluginUsageByPipeInternal(pluginName);
} finally {
releaseReadLock();
}
}
private void validatePipePluginUsageByPipeInternal(String pluginName) {
Iterable<PipeMeta> pipeMetas = getPipeMetaList();
for (PipeMeta pipeMeta : pipeMetas) {
PipeParameters extractorParameters = pipeMeta.getStaticMeta().getSourceParameters();
final String extractorPluginName =
extractorParameters.getStringOrDefault(
Arrays.asList(PipeSourceConstant.EXTRACTOR_KEY, PipeSourceConstant.SOURCE_KEY),
BuiltinPipePlugin.IOTDB_EXTRACTOR.getPipePluginName());
if (pluginName.equals(extractorPluginName)) {
String exceptionMessage =
String.format(
"PipePlugin '%s' is already used by Pipe '%s' as a source.",
pluginName, pipeMeta.getStaticMeta().getPipeName());
throw new PipeException(exceptionMessage);
}
PipeParameters processorParameters = pipeMeta.getStaticMeta().getProcessorParameters();
final String processorPluginName =
processorParameters.getString(PipeProcessorConstant.PROCESSOR_KEY);
if (pluginName.equals(processorPluginName)) {
String exceptionMessage =
String.format(
"PipePlugin '%s' is already used by Pipe '%s' as a processor.",
pluginName, pipeMeta.getStaticMeta().getPipeName());
throw new PipeException(exceptionMessage);
}
PipeParameters connectorParameters = pipeMeta.getStaticMeta().getSinkParameters();
final String connectorPluginName =
connectorParameters.getStringOrDefault(
Arrays.asList(PipeSinkConstant.CONNECTOR_KEY, PipeSinkConstant.SINK_KEY),
IOTDB_THRIFT_CONNECTOR.getPipePluginName());
if (pluginName.equals(connectorPluginName)) {
String exceptionMessage =
String.format(
"PipePlugin '%s' is already used by Pipe '%s' as a sink.",
pluginName, pipeMeta.getStaticMeta().getPipeName());
throw new PipeException(exceptionMessage);
}
}
}
  /////////////////////////////// Pipe Task Management ///////////////////////////////

  /**
   * Registers a new pipe from the given plan; validation is expected to have happened via
   * {@link #checkBeforeCreatePipe}.
   *
   * @return a SUCCESS_STATUS {@link TSStatus}
   */
  public TSStatus createPipe(final CreatePipePlanV2 plan) {
    acquireWriteLock();
    try {
      pipeMetaKeeper.addPipeMeta(new PipeMeta(plan.getPipeStaticMeta(), plan.getPipeRuntimeMeta()));
      return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
    } finally {
      releaseWriteLock();
    }
  }
  /**
   * Applies a batch of pipe sub-plans (create / alter / set-status / drop) under a single write
   * lock, stopping at the first failure.
   *
   * @return SUCCESS_STATUS (with sub-status cleared) if every sub-plan succeeded; otherwise
   *     PIPE_ERROR with one sub-status entry per attempted sub-plan, the last being the failure
   */
  public TSStatus operateMultiplePipes(final OperateMultiplePipesPlanV2 plan) {
    acquireWriteLock();
    // NOTE(review): the nested createPipe/alterPipe/... calls acquire the write lock again while
    // it is already held — presumably the keeper's write lock is reentrant; confirm.
    try {
      if (plan.getSubPlans() == null || plan.getSubPlans().isEmpty()) {
        return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
      }
      TSStatus status = new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
      // We use sub-status to record the status of each subPlan
      status.setSubStatus(new ArrayList<>());
      for (final ConfigPhysicalPlan subPlan : plan.getSubPlans()) {
        try {
          if (subPlan instanceof CreatePipePlanV2) {
            createPipe((CreatePipePlanV2) subPlan);
          } else if (subPlan instanceof AlterPipePlanV2) {
            alterPipe((AlterPipePlanV2) subPlan);
          } else if (subPlan instanceof SetPipeStatusPlanV2) {
            setPipeStatus((SetPipeStatusPlanV2) subPlan);
          } else if (subPlan instanceof DropPipePlanV2) {
            dropPipe((DropPipePlanV2) subPlan);
          } else {
            throw new PipeException(
                String.format("Unsupported subPlan type: %s", subPlan.getClass().getName()));
          }
          status.getSubStatus().add(new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode()));
        } catch (final Exception e) {
          // If one of the subPlan fails, we stop operating the rest of the pipes
          LOGGER.error("Failed to operate pipe", e);
          status.setCode(TSStatusCode.PIPE_ERROR.getStatusCode());
          status.getSubStatus().add(new TSStatus(TSStatusCode.PIPE_ERROR.getStatusCode()));
          break;
        }
      }
      // If all the subPlans are successful, we return the success status and clear sub-status.
      // Otherwise, we return the error status with sub-status to record the failing index.
      if (status.getCode() == TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
        status.setSubStatus(null);
      }
      return status;
    } finally {
      releaseWriteLock();
    }
  }
  /**
   * Replaces the static / runtime meta of an existing pipe while preserving its temporary
   * (pure in-memory) meta across the remove/add cycle.
   *
   * @return a SUCCESS_STATUS {@link TSStatus}
   */
  public TSStatus alterPipe(final AlterPipePlanV2 plan) {
    acquireWriteLock();
    try {
      // Capture the temporary meta first: it is not part of the plan and must survive the swap.
      final PipeTemporaryMeta temporaryMeta =
          pipeMetaKeeper.getPipeMeta(plan.getPipeStaticMeta().getPipeName()).getTemporaryMeta();
      pipeMetaKeeper.removePipeMeta(plan.getPipeStaticMeta().getPipeName());
      pipeMetaKeeper.addPipeMeta(
          new PipeMeta(plan.getPipeStaticMeta(), plan.getPipeRuntimeMeta(), temporaryMeta));
      return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
    } finally {
      releaseWriteLock();
    }
  }

  /** Sets the runtime status (e.g. RUNNING / STOPPED) of the named pipe. */
  public TSStatus setPipeStatus(final SetPipeStatusPlanV2 plan) {
    acquireWriteLock();
    try {
      pipeMetaKeeper
          .getPipeMeta(plan.getPipeName())
          .getRuntimeMeta()
          .getStatus()
          .set(plan.getPipeStatus());
      return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
    } finally {
      releaseWriteLock();
    }
  }

  // Removes the named pipe's meta. Presumably tolerant of unknown names — drop is allowed on all
  // nodes regardless of existence (see checkBeforeDropPipeInternal); confirm keeper behavior.
  public TSStatus dropPipe(final DropPipePlanV2 plan) {
    acquireWriteLock();
    try {
      pipeMetaKeeper.removePipeMeta(plan.getPipeName());
      return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
    } finally {
      releaseWriteLock();
    }
  }
  /** Returns all pipe metas wrapped in a {@link PipeTableResp} with a success status. */
  public DataSet showPipes() {
    acquireReadLock();
    try {
      return new PipeTableResp(
          new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode()),
          StreamSupport.stream(getPipeMetaList().spliterator(), false)
              .collect(Collectors.toList()));
    } finally {
      releaseReadLock();
    }
  }

  // NOTE(review): the returned iterable is presumably a live view of the keeper's state and is
  // iterated by callers after the read lock is released — confirm before caching or mutating.
  public Iterable<PipeMeta> getPipeMetaList() {
    acquireReadLock();
    try {
      return pipeMetaKeeper.getPipeMetaList();
    } finally {
      releaseReadLock();
    }
  }

  /** Returns the meta of the named pipe (behavior for unknown names depends on the keeper). */
  public PipeMeta getPipeMetaByPipeName(final String pipeName) {
    acquireReadLock();
    try {
      return pipeMetaKeeper.getPipeMetaByPipeName(pipeName);
    } finally {
      releaseReadLock();
    }
  }

  /** Returns {@code true} if no pipe is currently registered. */
  public boolean isEmpty() {
    acquireReadLock();
    try {
      return pipeMetaKeeper.isEmpty();
    } finally {
      releaseReadLock();
    }
  }
  /////////////////////////////// Pipe Runtime Management ///////////////////////////////

  /** Handle the region leader change event and update the pipe task meta accordingly. */
  public TSStatus handleLeaderChange(final PipeHandleLeaderChangePlan plan) {
    acquireWriteLock();
    try {
      return handleLeaderChangeInternal(plan);
    } finally {
      releaseWriteLock();
    }
  }

  // For every (consensus group -> new leader) entry, updates each non-consensus, non-external
  // pipe's task meta: re-point the task to the new leader, remove the task when the leader is -1
  // (region removed), or create a task for a newly added region group.
  private TSStatus handleLeaderChangeInternal(final PipeHandleLeaderChangePlan plan) {
    plan.getConsensusGroupId2NewLeaderIdMap()
        .forEach(
            (consensusGroupId, newLeader) ->
                pipeMetaKeeper
                    .getPipeMetaList()
                    .forEach(
                        pipeMeta -> {
                          if (PipeType.CONSENSUS.equals(pipeMeta.getStaticMeta().getPipeType())) {
                            return; // pipe consensus pipe task will not change
                          }

                          if (pipeMeta.getStaticMeta().isSourceExternal()) {
                            // external source pipe tasks are not balanced here since non-leaders
                            // don't know about RegionLeader Map and will be balanced in the meta
                            // sync procedure
                            LOGGER.info(
                                "Pipe {} is using external source, skip region leader change. PipeHandleLeaderChangePlan: {}",
                                pipeMeta.getStaticMeta().getPipeName(),
                                plan.getConsensusGroupId2NewLeaderIdMap());
                            return;
                          }

                          final Map<Integer, PipeTaskMeta> consensusGroupIdToTaskMetaMap =
                              pipeMeta.getRuntimeMeta().getConsensusGroupId2TaskMetaMap();

                          if (consensusGroupIdToTaskMetaMap.containsKey(consensusGroupId.getId())) {
                            // If the region leader is -1, it means the region is
                            // removed
                            if (newLeader != -1) {
                              consensusGroupIdToTaskMetaMap
                                  .get(consensusGroupId.getId())
                                  .setLeaderNodeId(newLeader);
                              // New region leader may contain un-transferred events
                              ((PipeTemporaryMetaInCoordinator) pipeMeta.getTemporaryMeta())
                                  .markDataNodeUncompleted(newLeader);
                            } else {
                              consensusGroupIdToTaskMetaMap.remove(consensusGroupId.getId());
                            }
                          } else {
                            // If CN does not contain the region group, it means the data
                            // region group is newly added.
                            if (newLeader != -1) {
                              consensusGroupIdToTaskMetaMap.put(
                                  consensusGroupId.getId(),
                                  new PipeTaskMeta(MinimumProgressIndex.INSTANCE, newLeader));
                            }
                            // else:
                            // "The pipe task meta does not contain the data region group {} or
                            // the data region group has already been removed"
                          }
                        }));
    return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
  }
  /**
   * Replace the local {@link PipeMeta}s by the {@link PipeMeta}s from the leader {@link
   * ConfigNode}.
   *
   * @param plan The plan containing all the {@link PipeMeta}s from leader {@link ConfigNode}
   * @return {@link TSStatusCode#SUCCESS_STATUS}
   */
  public TSStatus handleMetaChanges(final PipeHandleMetaChangePlan plan) {
    acquireWriteLock();
    try {
      return handleMetaChangesInternal(plan);
    } finally {
      releaseWriteLock();
    }
  }

  private TSStatus handleMetaChangesInternal(final PipeHandleMetaChangePlan plan) {
    LOGGER.debug("Handling pipe meta changes ...");
    // Wholesale replacement: drop the local view, then re-add each meta from the plan.
    pipeMetaKeeper.clear();

    // This method is only triggered by pipe sync / meta report currently
    // And is guaranteed to print log finally
    // NOTE(review): schedule(...) presumably rate-limits this logging, returning an empty
    // Optional when the log budget for the round is exhausted — confirm.
    final Optional<Logger> logger =
        PipeConfigNodeResourceManager.log()
            .schedule(
                PipeTaskInfo.class,
                PipeConfig.getInstance().getPipeMetaReportMaxLogNumPerRound(),
                PipeConfig.getInstance().getPipeMetaReportMaxLogIntervalRounds(),
                pipeMetaKeeper.getPipeMetaCount());
    plan.getPipeMetaList()
        .forEach(
            pipeMeta -> {
              pipeMetaKeeper.addPipeMeta(pipeMeta);
              logger.ifPresent(l -> l.debug("Recording pipe meta: {}", pipeMeta));
            });
    return new TSStatus(TSStatusCode.SUCCESS_STATUS.getStatusCode());
  }
  /** Returns {@code true} if the pipe exists and was stopped by a runtime exception. */
  public boolean isStoppedByRuntimeException(final String pipeName) {
    acquireReadLock();
    try {
      return isStoppedByRuntimeExceptionInternal(pipeName);
    } finally {
      releaseReadLock();
    }
  }

  private boolean isStoppedByRuntimeExceptionInternal(final String pipeName) {
    // containsPipeMeta guards getPipeMeta against unknown names.
    return pipeMetaKeeper.containsPipeMeta(pipeName)
        && pipeMetaKeeper.getPipeMeta(pipeName).getRuntimeMeta().getIsStoppedByRuntimeException();
  }
  /**
   * Clear the exceptions of a pipe locally after it starts successfully.
   *
   * <p>If there are exceptions cleared or flag changed, the messages will then be updated to all
   * the nodes through {@link PipeHandleMetaChangeProcedure}.
   *
   * @param pipeName The name of the pipe to be clear exception
   */
  public void clearExceptionsAndSetIsStoppedByRuntimeExceptionToFalse(final String pipeName) {
    acquireWriteLock();
    try {
      clearExceptionsAndSetIsStoppedByRuntimeExceptionToFalseInternal(pipeName);
    } finally {
      releaseWriteLock();
    }
  }

  private void clearExceptionsAndSetIsStoppedByRuntimeExceptionToFalseInternal(
      final String pipeName) {
    // Unknown pipe names are silently ignored.
    if (!pipeMetaKeeper.containsPipeMeta(pipeName)) {
      return;
    }

    final PipeRuntimeMeta runtimeMeta = pipeMetaKeeper.getPipeMeta(pipeName).getRuntimeMeta();

    // To avoid unnecessary retries, we set the isStoppedByRuntimeException flag to false
    runtimeMeta.setIsStoppedByRuntimeException(false);
    // Record when the exceptions were cleared; presumably used to ignore stale reports — confirm.
    runtimeMeta.setExceptionsClearTime(System.currentTimeMillis());

    final Map<Integer, PipeRuntimeException> exceptionMap =
        runtimeMeta.getNodeId2PipeRuntimeExceptionMap();
    if (!exceptionMap.isEmpty()) {
      exceptionMap.clear();
    }

    runtimeMeta
        .getConsensusGroupId2TaskMetaMap()
        .values()
        .forEach(
            pipeTaskMeta -> {
              // Only clear when at least one message exists, avoiding needless mutation.
              if (pipeTaskMeta.getExceptionMessages().iterator().hasNext()) {
                pipeTaskMeta.clearExceptionMessages();
              }
            });
  }
  /**
   * Resets only the isStoppedByRuntimeException flag for the named pipe, leaving any recorded
   * exception messages intact (contrast with
   * {@link #clearExceptionsAndSetIsStoppedByRuntimeExceptionToFalse}).
   */
  public void setIsStoppedByRuntimeExceptionToFalse(final String pipeName) {
    acquireWriteLock();
    try {
      setIsStoppedByRuntimeExceptionToFalseInternal(pipeName);
    } finally {
      releaseWriteLock();
    }
  }

  private void setIsStoppedByRuntimeExceptionToFalseInternal(final String pipeName) {
    // Unknown pipe names are silently ignored.
    if (!pipeMetaKeeper.containsPipeMeta(pipeName)) {
      return;
    }

    pipeMetaKeeper.getPipeMeta(pipeName).getRuntimeMeta().setIsStoppedByRuntimeException(false);
  }
  /**
   * Record the exceptions of all pipes locally if they encountered failure when pushing {@link
   * PipeMeta}s to dataNodes.
   *
   * <p>If there are exceptions recorded, the related pipes will be stopped, and the exception
   * messages will then be updated to all the nodes through {@link PipeHandleMetaChangeProcedure}.
   *
   * @param respMap The responseMap after pushing pipe meta
   * @return {@code true} if there are exceptions encountered
   */
  public boolean recordDataNodePushPipeMetaExceptions(
      final Map<Integer, TPushPipeMetaResp> respMap) {
    acquireWriteLock();
    try {
      return recordDataNodePushPipeMetaExceptionsInternal(respMap);
    } finally {
      releaseWriteLock();
    }
  }

  private boolean recordDataNodePushPipeMetaExceptionsInternal(
      final Map<Integer, TPushPipeMetaResp> respMap) {
    boolean hasException = false;

    for (final Map.Entry<Integer, TPushPipeMetaResp> respEntry : respMap.entrySet()) {
      final int dataNodeId = respEntry.getKey();
      final TPushPipeMetaResp resp = respEntry.getValue();
      if (resp.getStatus().getCode() == TSStatusCode.PIPE_PUSH_META_ERROR.getStatusCode()) {
        hasException = true;

        if (!resp.isSetExceptionMessages()) {
          // The pushPipeMeta process on dataNode encountered internal errors
          continue;
        }

        resp.getExceptionMessages().stream()
            // Ignore messages for pipes that no longer exist locally.
            .filter(message -> pipeMetaKeeper.containsPipeMeta(message.getPipeName()))
            .forEach(
                message -> {
                  final PipeRuntimeMeta runtimeMeta =
                      pipeMetaKeeper.getPipeMeta(message.getPipeName()).getRuntimeMeta();

                  // Mark the status of the pipe with exception as stopped
                  runtimeMeta.getStatus().set(PipeStatus.STOPPED);
                  runtimeMeta.setIsStoppedByRuntimeException(true);

                  final Map<Integer, PipeRuntimeException> exceptionMap =
                      runtimeMeta.getNodeId2PipeRuntimeExceptionMap();
                  // Keep only the newest exception per data node.
                  if (!exceptionMap.containsKey(dataNodeId)
                      || exceptionMap.get(dataNodeId).getTimeStamp() < message.getTimeStamp()) {
                    exceptionMap.put(
                        dataNodeId,
                        new PipeRuntimeCriticalException(
                            message.getMessage(), message.getTimeStamp()));
                  }
                });
      }
    }

    return hasException;
  }
  /** Restarts, under the write lock, all pipes that were stopped by runtime exceptions. */
  public boolean autoRestart() {
    acquireWriteLock();
    try {
      return autoRestartInternal();
    } finally {
      releaseWriteLock();
    }
  }

  /**
   * Set the statuses of all the pipes stopped automatically because of critical exceptions to
   * {@link PipeStatus#RUNNING} in order to restart them.
   *
   * @return {@code true} if there are pipes need restarting
   */
  private boolean autoRestartInternal() {
    // AtomicBoolean is used only because the lambda needs a mutable flag, not for concurrency.
    final AtomicBoolean needRestart = new AtomicBoolean(false);
    final List<String> pipeToRestart = new LinkedList<>();

    pipeMetaKeeper
        .getPipeMetaList()
        .forEach(
            pipeMeta -> {
              if (pipeMeta.getRuntimeMeta().getIsStoppedByRuntimeException()) {
                pipeMeta.getRuntimeMeta().getStatus().set(PipeStatus.RUNNING);

                needRestart.set(true);
                pipeToRestart.add(pipeMeta.getStaticMeta().getPipeName());
              }
            });

    if (needRestart.get()) {
      LOGGER.info("PipeMetaSyncer is trying to restart the pipes: {}", pipeToRestart);
    }
    return needRestart.get();
  }
  /** Clears exception state for every pipe that is RUNNING after a successful restart. */
  public void handleSuccessfulRestart() {
    acquireWriteLock();
    try {
      handleSuccessfulRestartInternal();
    } finally {
      releaseWriteLock();
    }
  }

  /**
   * Clear the exceptions to, and set the isAutoStopped flag to false for the successfully restarted
   * pipe.
   */
  private void handleSuccessfulRestartInternal() {
    pipeMetaKeeper
        .getPipeMetaList()
        .forEach(
            pipeMeta -> {
              if (pipeMeta.getRuntimeMeta().getStatus().get().equals(PipeStatus.RUNNING)) {
                // NOTE(review): re-enters the write lock while it is held — presumably reentrant.
                clearExceptionsAndSetIsStoppedByRuntimeExceptionToFalse(
                    pipeMeta.getStaticMeta().getPipeName());
              }
            });
  }

  /** Removes the named pipe's meta under the write lock. */
  public void removePipeMeta(final String pipeName) {
    acquireWriteLock();
    try {
      removePipeMetaInternal(pipeName);
    } finally {
      releaseWriteLock();
    }
  }

  private void removePipeMetaInternal(final String pipeName) {
    pipeMetaKeeper.removePipeMeta(pipeName);
  }
/////////////////////////////// Snapshot ///////////////////////////////
@Override
public boolean processTakeSnapshot(final File snapshotDir) throws IOException {
acquireReadLock();
try {
final File snapshotFile = new File(snapshotDir, SNAPSHOT_FILE_NAME);
if (snapshotFile.exists() && snapshotFile.isFile()) {
LOGGER.error(
"Failed to take snapshot, because snapshot file [{}] is already exist.",
snapshotFile.getAbsolutePath());
return false;
}
try (final FileOutputStream fileOutputStream = new FileOutputStream(snapshotFile)) {
pipeMetaKeeper.processTakeSnapshot(fileOutputStream);
fileOutputStream.getFD().sync();
}
return true;
} finally {
releaseReadLock();
}
}
@Override
public void processLoadSnapshot(final File snapshotDir) throws IOException {
acquireWriteLock();
try {
final File snapshotFile = new File(snapshotDir, SNAPSHOT_FILE_NAME);
if (!snapshotFile.exists() || !snapshotFile.isFile()) {
LOGGER.error(
"Failed to load snapshot,snapshot file [{}] is not exist.",
snapshotFile.getAbsolutePath());
return;
}
try (final FileInputStream fileInputStream = new FileInputStream(snapshotFile)) {
pipeMetaKeeper.processLoadSnapshot(fileInputStream);
}
} finally {
releaseWriteLock();
}
}
  /////////////////////////////// hashCode & equals ///////////////////////////////

  @Override
  public int hashCode() {
    return pipeMetaKeeper.hashCode();
  }

  /** Two {@code PipeTaskInfo} instances are equal iff their meta keepers are equal. */
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    final PipeTaskInfo other = (PipeTaskInfo) obj;
    return pipeMetaKeeper.equals(other.pipeMetaKeeper);
  }

  @Override
  public String toString() {
    return pipeMetaKeeper.toString();
  }
  //////////////////////////// APIs provided for metric framework ////////////////////////////

  // NOTE(review): these counters read the keeper without taking the read lock; presumably the
  // underlying counting is safe (or acceptable) for concurrent reads — confirm.
  public long runningPipeCount() {
    return pipeMetaKeeper.runningPipeCount();
  }

  public long droppedPipeCount() {
    return pipeMetaKeeper.droppedPipeCount();
  }

  public long userStoppedPipeCount() {
    return pipeMetaKeeper.userStoppedPipeCount();
  }

  public long exceptionStoppedPipeCount() {
    return pipeMetaKeeper.exceptionStoppedPipeCount();
  }
}
|
apache/jena | 35,787 | jena-core/src/test/java/org/apache/jena/ontology/impl/TestProperty.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Package
///////////////
package org.apache.jena.ontology.impl;
// Imports
///////////////
import java.util.List;
import junit.framework.TestSuite;
import org.apache.jena.ontology.*;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.reasoner.test.TestUtil;
import org.apache.jena.util.FileManager;
import org.apache.jena.vocabulary.RDF;
/**
* <p>
* Unit test cases for the OntProperty class
* </p>
*/
@SuppressWarnings("removal")
public class TestProperty
extends OntTestBase
{
// Constants
//////////////////////////////////
// Static variables
//////////////////////////////////
// Instance variables
//////////////////////////////////
// Constructors
//////////////////////////////////
    /** Entry point for the JUnit test runner: the whole class doubles as its own suite. */
    static public TestSuite suite() {
        return new TestProperty( "TestProperty" );
    }

    /** @param name the display name of this test suite */
    public TestProperty( String name ) {
        super( name );
    }
// External signature methods
//////////////////////////////////
@Override
public OntTestCase[] getTests() {
return new OntTestCase[] {
new OntTestCase( "OntProperty.super-property", true, true, true ) {
@Override
public void ontTest( OntModel m ) {
Profile prof = m.getProfile();
OntProperty p = m.createOntProperty( NS + "p" );
OntProperty q = m.createOntProperty( NS + "q" );
OntProperty r = m.createOntProperty( NS + "r" );
p.addSuperProperty( q );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.SUB_PROPERTY_OF() ) );
assertEquals( "p have super-prop q", q, p.getSuperProperty() );
p.addSuperProperty( r );
assertEquals( "Cardinality should be 2", 2, p.getCardinality( prof.SUB_PROPERTY_OF() ) );
iteratorTest( p.listSuperProperties(), new Object[] {q, r} );
p.setSuperProperty( r );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.SUB_PROPERTY_OF() ) );
assertEquals( "p shuold have super-prop r", r, p.getSuperProperty() );
p.removeSuperProperty( q );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.SUB_PROPERTY_OF() ) );
p.removeSuperProperty( r );
assertEquals( "Cardinality should be 0", 0, p.getCardinality( prof.SUB_PROPERTY_OF() ) );
// for symmetry with listSuperClasses(), exclude the reflexive case
List<? extends OntProperty> sp = p.listSuperProperties().toList();
assertFalse( "super-properties should not include reflexive case", sp.contains( p ) );
}
},
new OntTestCase( "OntProperty.sub-property", true, true, true ) {
@Override
public void ontTest( OntModel m ) {
Profile prof = m.getProfile();
OntProperty p = m.createOntProperty( NS + "p" );
OntProperty q = m.createOntProperty( NS + "q" );
OntProperty r = m.createOntProperty( NS + "r" );
p.addSubProperty( q );
assertEquals( "Cardinality should be 1", 1, q.getCardinality( prof.SUB_PROPERTY_OF() ) );
assertEquals( "p have sub-prop q", q, p.getSubProperty() );
p.addSubProperty( r );
assertEquals( "Cardinality should be 2", 2, q.getCardinality( prof.SUB_PROPERTY_OF() ) + r.getCardinality( prof.SUB_PROPERTY_OF() ) );
iteratorTest( p.listSubProperties(), new Object[] {q, r} );
iteratorTest( q.listSuperProperties(), new Object[] {p} );
iteratorTest( r.listSuperProperties(), new Object[] {p} );
p.setSubProperty( r );
assertEquals( "Cardinality should be 1", 1, q.getCardinality( prof.SUB_PROPERTY_OF() ) + r.getCardinality( prof.SUB_PROPERTY_OF() ) );
assertEquals( "p should have sub-prop r", r, p.getSubProperty() );
p.removeSubProperty( q );
assertTrue( "Should have sub-prop r", p.hasSubProperty( r, false ) );
p.removeSubProperty( r );
assertTrue( "Should not have sub-prop r", !p.hasSubProperty( r, false ) );
}
},
// Tests for the add/get/set/remove lifecycle of the four value-holding
// OntProperty relations (domain, range, equivalentProperty, inverseOf).
// Pattern for each: add one value (cardinality 1), add a second
// (cardinality 2, both listed), set a single value (replaces all, back to
// cardinality 1), then remove values one at a time down to cardinality 0.
new OntTestCase( "OntProperty.domain", true, true, true ) {
@Override
public void ontTest( OntModel m ) {
Profile prof = m.getProfile();
OntProperty p = m.createOntProperty( NS + "p" );
OntResource a = m.getResource( NS + "a" ).as( OntResource.class );
OntResource b = m.getResource( NS + "b" ).as( OntResource.class );
// addDomain accumulates: first value
p.addDomain( a );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.DOMAIN() ) );
assertEquals( "p have domain a", a, p.getDomain() );
// second addDomain adds rather than replaces
p.addDomain( b );
assertEquals( "Cardinality should be 2", 2, p.getCardinality( prof.DOMAIN() ) );
iteratorTest( p.listDomain(), new Object[] {a, b} );
// setDomain replaces all existing domain statements with one
p.setDomain( b );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.DOMAIN() ) );
assertEquals( "p should have domain b", b, p.getDomain() );
// removing a value that is no longer present is a no-op
p.removeDomain( a );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.DOMAIN() ) );
p.removeDomain( b );
assertEquals( "Cardinality should be 0", 0, p.getCardinality( prof.DOMAIN() ) );
}
},
// Same lifecycle as the domain test above, applied to rdfs:range.
new OntTestCase( "OntProperty.range", true, true, true ) {
@Override
public void ontTest( OntModel m ) {
Profile prof = m.getProfile();
OntProperty p = m.createOntProperty( NS + "p" );
OntResource a = m.getResource( NS + "a" ).as( OntResource.class );
OntResource b = m.getResource( NS + "b" ).as( OntResource.class );
p.addRange( a );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.RANGE() ) );
assertEquals( "p have range a", a, p.getRange() );
p.addRange( b );
assertEquals( "Cardinality should be 2", 2, p.getCardinality( prof.RANGE() ) );
iteratorTest( p.listRange(), new Object[] {a, b} );
// setRange replaces all existing range statements with one
p.setRange( b );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.RANGE() ) );
assertEquals( "p should have range b", b, p.getRange() );
p.removeRange( a );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.RANGE() ) );
p.removeRange( b );
assertEquals( "Cardinality should be 0", 0, p.getCardinality( prof.RANGE() ) );
}
},
// equivalentProperty is OWL-only, hence the third constructor flag is false.
new OntTestCase( "OntProperty.equivalentProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
Profile prof = m.getProfile();
OntProperty p = m.createObjectProperty( NS + "p" );
OntProperty q = m.createObjectProperty( NS + "q" );
OntProperty r = m.createObjectProperty( NS + "r" );
p.addEquivalentProperty( q );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.EQUIVALENT_PROPERTY() ) );
assertEquals( "p have equivalentProperty q", q, p.getEquivalentProperty() );
p.addEquivalentProperty( r );
assertEquals( "Cardinality should be 2", 2, p.getCardinality( prof.EQUIVALENT_PROPERTY() ) );
iteratorTest( p.listEquivalentProperties(), new Object[] {q,r} );
p.setEquivalentProperty( r );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.EQUIVALENT_PROPERTY() ) );
assertEquals( "p should have equivalentProperty r", r, p.getEquivalentProperty() );
p.removeEquivalentProperty( q );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.EQUIVALENT_PROPERTY() ) );
p.removeEquivalentProperty( r );
assertEquals( "Cardinality should be 0", 0, p.getCardinality( prof.EQUIVALENT_PROPERTY() ) );
}
},
// inverseOf lifecycle, plus a check that inverse values present as
// ObjectProperty in both directions (getInverseOf / getInverse).
new OntTestCase( "OntProperty.inverseOf", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
Profile prof = m.getProfile();
OntProperty p = m.createObjectProperty( NS + "p" );
OntProperty q = m.createObjectProperty( NS + "q" );
OntProperty r = m.createObjectProperty( NS + "r" );
// before any inverse is asserted, queries report absence, not error
assertFalse( p.isInverseOf( q ) );
assertEquals( null, p.getInverseOf() );
p.addInverseOf( q );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.INVERSE_OF() ) );
assertEquals( "p should have inverse q", q, p.getInverseOf() );
assertTrue( "inverse value should be an object property", p.getInverseOf() instanceof ObjectProperty );
assertTrue( "inverse value should be an object property", q.getInverse() instanceof ObjectProperty );
p.addInverseOf( r );
assertEquals( "Cardinality should be 2", 2, p.getCardinality( prof.INVERSE_OF() ) );
iteratorTest( p.listInverseOf(), new Object[] {q,r} );
p.setInverseOf( r );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.INVERSE_OF() ) );
assertEquals( "p should have inverse r", r, p.getInverseOf() );
// note: removal API is removeInverseProperty, not removeInverseOf
p.removeInverseProperty( q );
assertEquals( "Cardinality should be 1", 1, p.getCardinality( prof.INVERSE_OF() ) );
p.removeInverseProperty( r );
assertEquals( "Cardinality should be 0", 0, p.getCardinality( prof.INVERSE_OF() ) );
}
},
// Tests that read the fixture testing/ontology/{owl|rdfs}/Property/test.rdf
// and verify property relations asserted in that file; the fixture language
// is selected by the m_owlLang flag of the current test run.
new OntTestCase( "OntProperty.subproperty.fromFile", true, true, true ) {
@Override
public void ontTest( OntModel m ) {
String lang = m_owlLang ? "owl" : "rdfs";
String fileName = "file:testing/ontology/" + lang + "/Property/test.rdf";
m.read( fileName );
OntProperty p = m.getProperty( NS, "p" ).as( OntProperty.class );
OntProperty q = m.getProperty( NS, "q" ).as( OntProperty.class );
// the fixture asserts p rdfs:subPropertyOf q; check both directions
iteratorTest( p.listSuperProperties(), new Object[] {q} );
iteratorTest( q.listSubProperties(), new Object[] {p} );
}
},
// Fixture asserts p rdfs:domain ClassA.
new OntTestCase( "OntProperty.domain.fromFile", true, true, true ) {
@Override
public void ontTest( OntModel m ) {
String lang = m_owlLang ? "owl" : "rdfs";
String fileName = "file:testing/ontology/" + lang + "/Property/test.rdf";
m.read( fileName );
OntProperty p = m.getProperty( NS, "p" ).as( OntProperty.class );
OntClass A = m.getResource( NS + "ClassA").as( OntClass.class);
assertTrue( "p should have domain A", p.hasDomain( A ) );
}
},
// Fixture asserts p rdfs:range ClassB; verify hasRange sees it.
// Fix: the failure message previously said "domain" although the
// assertion checks hasRange — a misleading message on test failure.
new OntTestCase( "OntProperty.range.fromFile", true, true, true ) {
@Override
public void ontTest( OntModel m ) {
String lang = m_owlLang ? "owl" : "rdfs";
String fileName = "file:testing/ontology/" + lang + "/Property/test.rdf";
m.read( fileName );
OntProperty p = m.getProperty( NS, "p" ).as( OntProperty.class );
OntClass B = m.getResource( NS + "ClassB").as( OntClass.class);
assertTrue( "p should have range B", p.hasRange( B ) );
}
},
// Fixture asserts p owl:equivalentProperty r (OWL-only, third flag false).
// Fix: the test name misspelled "equivalentProperty" as "equivalentProeprty",
// which garbles test reports; corrected here.
new OntTestCase( "OntProperty.equivalentProperty.fromFile", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
String lang = m_owlLang ? "owl" : "rdfs";
String fileName = "file:testing/ontology/" + lang + "/Property/test.rdf";
m.read( fileName );
OntProperty p = m.getProperty( NS, "p" ).as( OntProperty.class );
OntProperty r = m.getProperty( NS, "r" ).as( OntProperty.class );
assertTrue( "p should have equiv prop r", p.hasEquivalentProperty( r ) );
}
},
// Fixture asserts p owl:inverseOf s (OWL-only, third flag false).
new OntTestCase( "OntProperty.inversePropertyOf.fromFile", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
String lang = m_owlLang ? "owl" : "rdfs";
String fileName = "file:testing/ontology/" + lang + "/Property/test.rdf";
m.read( fileName );
OntProperty p = m.getProperty( NS, "p" ).as( OntProperty.class );
OntProperty s = m.getProperty( NS, "s" ).as( OntProperty.class );
assertTrue( "p should have inv prop s", p.isInverseOf( s ) );
}
},
// type tests
// Each of the following creates a property of exactly one specialised kind
// and verifies that only the matching is*Property() predicate returns true.
// isSymmetricProperty is OWL-only, so it is guarded by m_owlLang.
new OntTestCase( "OntProperty.isFunctionalProperty dt", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
// createDatatypeProperty( uri, true ) => functional datatype property
OntProperty p = m.createDatatypeProperty( NS + "p", true );
assertTrue( "isFunctionalProperty not correct", p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {
assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() );
}
}
},
new OntTestCase( "OntProperty.isFunctionalProperty object", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
// createObjectProperty( uri, true ) => functional object property
OntProperty p = m.createObjectProperty( NS + "p", true );
assertTrue( "isFunctionalProperty not correct", p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {
assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() );
}
}
},
new OntTestCase( "OntProperty.isDatatypeProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
// non-functional datatype property
OntProperty p = m.createDatatypeProperty( NS + "p", false );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {
assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() );
}
}
},
new OntTestCase( "OntProperty.isObjectProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
// non-functional object property
OntProperty p = m.createObjectProperty( NS + "p", false );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {
assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() );
}
}
},
new OntTestCase( "OntProperty.isTransitiveProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
OntProperty p = m.createTransitiveProperty( NS + "p" );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() ); // this should be true by entailment, but we have reasoning switched off
assertTrue( "isTransitiveProperty not correct", p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {
assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() );
}
}
},
new OntTestCase( "OntProperty.isInverseFunctionalProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
OntProperty p = m.createInverseFunctionalProperty( NS + "p" );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() ); // this should be true by entailment, but we have reasoning switched off
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", p.isInverseFunctionalProperty() );
if (m_owlLang) {
assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() );
}
}
},
new OntTestCase( "OntProperty.isSymmetricProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
OntProperty p = m.createSymmetricProperty( NS + "p" );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() ); // this should be true by entailment, but we have reasoning switched off
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {
assertTrue( "isSymmetricProperty not correct", p.isSymmetricProperty() );
}
}
},
// convertTo* tests: each starts from a plain rdf:Property (all type
// predicates false), converts it to one specialised kind, and verifies
// that exactly that kind's predicate flips to true afterwards.
new OntTestCase( "OntProperty.convertToFunctionalProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
// build a plain property by asserting rdf:type rdf:Property directly
Property pSimple = m.createProperty( NS, "p" );
pSimple.addProperty( RDF.type, RDF.Property );
OntProperty p = pSimple.as( OntProperty.class );
// baseline: no specialised type yet
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
p = p.convertToFunctionalProperty();
assertTrue( "isFunctionalProperty not correct", p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
}
},
new OntTestCase( "OntProperty.convertToDatatypeProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
Property pSimple = m.createProperty( NS, "p" );
pSimple.addProperty( RDF.type, RDF.Property );
OntProperty p = pSimple.as( OntProperty.class );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
p = p.convertToDatatypeProperty();
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
}
},
new OntTestCase( "OntProperty.convertToObjectProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
Property pSimple = m.createProperty( NS, "p" );
pSimple.addProperty( RDF.type, RDF.Property );
OntProperty p = pSimple.as( OntProperty.class );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
p = p.convertToObjectProperty();
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
}
},
new OntTestCase( "OntProperty.convertToTransitiveProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
Property pSimple = m.createProperty( NS, "p" );
pSimple.addProperty( RDF.type, RDF.Property );
OntProperty p = pSimple.as( OntProperty.class );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
p = p.convertToTransitiveProperty();
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
}
},
new OntTestCase( "OntProperty.convertToInverseFunctionalProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
Property pSimple = m.createProperty( NS, "p" );
pSimple.addProperty( RDF.type, RDF.Property );
OntProperty p = pSimple.as( OntProperty.class );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
p = p.convertToInverseFunctionalProperty();
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
}
},
new OntTestCase( "OntProperty.convertToSymmetricProperty", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
Property pSimple = m.createProperty( NS, "p" );
pSimple.addProperty( RDF.type, RDF.Property );
OntProperty p = pSimple.as( OntProperty.class );
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", !p.isSymmetricProperty() ); }
p = p.convertToSymmetricProperty();
assertTrue( "isFunctionalProperty not correct", !p.isFunctionalProperty() );
assertTrue( "isDatatypeProperty not correct", !p.isDatatypeProperty() );
assertTrue( "isObjectProperty not correct", !p.isObjectProperty() );
assertTrue( "isTransitiveProperty not correct", !p.isTransitiveProperty() );
assertTrue( "isInverseFunctionalProperty not correct", !p.isInverseFunctionalProperty() );
if (m_owlLang) {assertTrue( "isSymmetricProperty not correct", p.isSymmetricProperty() ); }
}
},
// ObjectProperty.getInverse/hasInverse/listInverse: the inverse of p is
// read off statements of the form "q owl:inverseOf p" asserted on OTHER
// properties, so the test adds inverseOf on q and r, then queries p.
new OntTestCase( "ObjectProperty.inverse", true, true, false ) {
@Override
public void ontTest( OntModel m ) {
ObjectProperty p = m.createObjectProperty( NS + "p" );
ObjectProperty q = m.createObjectProperty( NS + "q" );
ObjectProperty r = m.createObjectProperty( NS + "r" );
// with no inverseOf statements, hasInverse is false and getInverse null
assertFalse( "No inverse of p", p.hasInverse() );
assertEquals( null, p.getInverse() );
q.addInverseOf( p );
assertTrue( "Inverse of p", p.hasInverse() );
assertEquals( "inverse of p ", q, p.getInverse() );
r.addInverseOf( p );
iteratorTest( p.listInverse(), new Object[] {q,r} );
}
},
// listReferringRestrictions must yield exactly the restrictions whose
// owl:onProperty is this property (r0, r1 on p), not those on q.
new OntTestCase( "OntProperty.listReferringRestrictions", true, true, false ) {
@Override
protected void ontTest( OntModel m ) {
ObjectProperty p = m.createObjectProperty( NS+"p" );
ObjectProperty q = m.createObjectProperty( NS+"q" );
Restriction r0 = m.createCardinalityRestriction( null, p, 2 );
Restriction r1 = m.createCardinalityRestriction( null, p, 3 );
Restriction r2 = m.createCardinalityRestriction( null, q, 2 );
Restriction r3 = m.createCardinalityRestriction( null, q, 3 );
assertTrue( iteratorContains( p.listReferringRestrictions(), r0 ) );
assertTrue( iteratorContains( p.listReferringRestrictions(), r1 ) );
assertFalse( iteratorContains( p.listReferringRestrictions(), r2 ) );
assertFalse( iteratorContains( p.listReferringRestrictions(), r3 ) );
// the iterator returns usable (non-null) Restriction nodes
assertNotNull( p.listReferringRestrictions().next() );
}
},
// Regression test: listDomain must not duplicate statements contributed
// by imported models, including when the import is served from the
// FileManager's model cache on a second load (hence the repeat below).
new OntTestCase( "no duplication from imported models", true, true, true ) {
@Override
protected void ontTest( OntModel m ) {
OntModel m0 = ModelFactory.createOntologyModel( OntModelSpec.OWL_DL_MEM_RULE_INF, null );
FileManager.getInternal().readModelInternal( m0, "file:testing/ontology/testImport9/a.ttl" );
OntProperty p0 = m0.getOntProperty( "http://incubator.apache.org/jena/2011/10/testont/b#propB" );
TestUtil.assertIteratorLength( p0.listDomain(), 3 );
// repeat test - thus using previously cached model for import
OntModel m1 = ModelFactory.createOntologyModel( OntModelSpec.OWL_DL_MEM_RULE_INF, null );
FileManager.getInternal().readModelInternal( m1, "file:testing/ontology/testImport9/a.ttl" );
OntProperty p1 = m1.getOntProperty( "http://incubator.apache.org/jena/2011/10/testont/b#propB" );
TestUtil.assertIteratorLength( p1.listDomain(), 3 );
}
}
};
}
// Internal implementation methods
//////////////////////////////////
//==============================================================================
// Inner class definitions
//==============================================================================
}
|
googleapis/google-cloud-java | 35,810 | java-retail/proto-google-cloud-retail-v2beta/src/main/java/com/google/cloud/retail/v2beta/ListGenerativeQuestionConfigsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2beta/generative_question_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2beta;
/**
*
*
* <pre>
* Response for ListQuestions method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse}
*/
// NOTE(review): protoc-generated code — do not hand-edit logic; regenerate
// from google/cloud/retail/v2beta/generative_question_service.proto instead.
public final class ListGenerativeQuestionConfigsResponse
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse)
ListGenerativeQuestionConfigsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListGenerativeQuestionConfigsResponse.newBuilder() to construct.
private ListGenerativeQuestionConfigsResponse(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor: initialises the repeated field to an immutable empty list.
private ListGenerativeQuestionConfigsResponse() {
generativeQuestionConfigs_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListGenerativeQuestionConfigsResponse();
}
// Descriptor plumbing: links this class to its proto message descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
.internal_static_google_cloud_retail_v2beta_ListGenerativeQuestionConfigsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
.internal_static_google_cloud_retail_v2beta_ListGenerativeQuestionConfigsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse.class,
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse.Builder.class);
}
public static final int GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER = 1;

// Backing storage for the repeated field; immutable once the message is built.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
generativeQuestionConfigs_;
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
getGenerativeQuestionConfigsList() {
return generativeQuestionConfigs_;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
getGenerativeQuestionConfigsOrBuilderList() {
return generativeQuestionConfigs_;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
@java.lang.Override
public int getGenerativeQuestionConfigsCount() {
return generativeQuestionConfigs_.size();
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.GenerativeQuestionConfig getGenerativeQuestionConfigs(
int index) {
return generativeQuestionConfigs_.get(index);
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
@java.lang.Override
public com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder
getGenerativeQuestionConfigsOrBuilder(int index) {
return generativeQuestionConfigs_.get(index);
}
// Memoized initialisation flag: -1 unknown, 0 not initialised, 1 initialised.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// no required fields in this message, so it is always initialised
memoizedIsInitialized = 1;
return true;
}

// Wire serialisation: field 1 repeated messages, then unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
output.writeMessage(1, generativeQuestionConfigs_.get(i));
}
getUnknownFields().writeTo(output);
}

// Serialised byte size, memoized in memoizedSize after first computation.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;

size = 0;
for (int i = 0; i < generativeQuestionConfigs_.size(); i++) {
size +=
com.google.protobuf.CodedOutputStream.computeMessageSize(
1, generativeQuestionConfigs_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}

// Value equality over the repeated field and unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse)) {
return super.equals(obj);
}
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse other =
(com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse) obj;

if (!getGenerativeQuestionConfigsList().equals(other.getGenerativeQuestionConfigsList()))
return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}

// Hash code consistent with equals; memoized in memoizedHashCode.
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getGenerativeQuestionConfigsCount() > 0) {
hash = (37 * hash) + GENERATIVE_QUESTION_CONFIGS_FIELD_NUMBER;
hash = (53 * hash) + getGenerativeQuestionConfigsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
// with and without an extension registry. All throw
// InvalidProtocolBufferException (or IOException for stream variants)
// on malformed input.
public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}

// Delimited variants read a length prefix first (for streamed messages).
public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}

// Builder factory methods: newBuilder() for a fresh builder, the
// prototype overload seeds it from an existing message via mergeFrom.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}

public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response for ListQuestions method.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse)
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
.internal_static_google_cloud_retail_v2beta_ListGenerativeQuestionConfigsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
.internal_static_google_cloud_retail_v2beta_ListGenerativeQuestionConfigsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse.class,
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse.Builder.class);
}

// Construct using
// com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}

// Resets the builder: either drops the plain list or clears the
// RepeatedFieldBuilder, and clears the presence bit for field 1.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (generativeQuestionConfigsBuilder_ == null) {
generativeQuestionConfigs_ = java.util.Collections.emptyList();
} else {
generativeQuestionConfigs_ = null;
generativeQuestionConfigsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}

@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.retail.v2beta.GenerativeQuestionServiceProto
.internal_static_google_cloud_retail_v2beta_ListGenerativeQuestionConfigsResponse_descriptor;
}

@java.lang.Override
public com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
getDefaultInstanceForType() {
return com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
.getDefaultInstance();
}

// build() enforces the initialisation contract; buildPartial() does not.
@java.lang.Override
public com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse build() {
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}

@java.lang.Override
public com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse buildPartial() {
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse result =
new com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}

// Freezes the repeated field into the result: wraps the plain list as
// unmodifiable on first build, or delegates to the field builder.
private void buildPartialRepeatedFields(
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse result) {
if (generativeQuestionConfigsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
generativeQuestionConfigs_ =
java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.generativeQuestionConfigs_ = generativeQuestionConfigs_;
} else {
result.generativeQuestionConfigs_ = generativeQuestionConfigsBuilder_.build();
}
}

// Generated hook for singular fields; this message has none, so the
// local copy of bitField0_ is intentionally unused.
private void buildPartial0(
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse result) {
int from_bitField0_ = bitField0_;
}

@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse) {
return mergeFrom(
(com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse other) {
if (other
== com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
.getDefaultInstance()) return this;
if (generativeQuestionConfigsBuilder_ == null) {
if (!other.generativeQuestionConfigs_.isEmpty()) {
if (generativeQuestionConfigs_.isEmpty()) {
generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.addAll(other.generativeQuestionConfigs_);
}
onChanged();
}
} else {
if (!other.generativeQuestionConfigs_.isEmpty()) {
if (generativeQuestionConfigsBuilder_.isEmpty()) {
generativeQuestionConfigsBuilder_.dispose();
generativeQuestionConfigsBuilder_ = null;
generativeQuestionConfigs_ = other.generativeQuestionConfigs_;
bitField0_ = (bitField0_ & ~0x00000001);
generativeQuestionConfigsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getGenerativeQuestionConfigsFieldBuilder()
: null;
} else {
generativeQuestionConfigsBuilder_.addAllMessages(other.generativeQuestionConfigs_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.retail.v2beta.GenerativeQuestionConfig m =
input.readMessage(
com.google.cloud.retail.v2beta.GenerativeQuestionConfig.parser(),
extensionRegistry);
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(m);
} else {
generativeQuestionConfigsBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
generativeQuestionConfigs_ = java.util.Collections.emptyList();
private void ensureGenerativeQuestionConfigsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
generativeQuestionConfigs_ =
new java.util.ArrayList<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>(
generativeQuestionConfigs_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2beta.GenerativeQuestionConfig,
com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder,
com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
generativeQuestionConfigsBuilder_;
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
getGenerativeQuestionConfigsList() {
if (generativeQuestionConfigsBuilder_ == null) {
return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
} else {
return generativeQuestionConfigsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public int getGenerativeQuestionConfigsCount() {
if (generativeQuestionConfigsBuilder_ == null) {
return generativeQuestionConfigs_.size();
} else {
return generativeQuestionConfigsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2beta.GenerativeQuestionConfig getGenerativeQuestionConfigs(
int index) {
if (generativeQuestionConfigsBuilder_ == null) {
return generativeQuestionConfigs_.get(index);
} else {
return generativeQuestionConfigsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder setGenerativeQuestionConfigs(
int index, com.google.cloud.retail.v2beta.GenerativeQuestionConfig value) {
if (generativeQuestionConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.set(index, value);
onChanged();
} else {
generativeQuestionConfigsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder setGenerativeQuestionConfigs(
int index,
com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder builderForValue) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.set(index, builderForValue.build());
onChanged();
} else {
generativeQuestionConfigsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
com.google.cloud.retail.v2beta.GenerativeQuestionConfig value) {
if (generativeQuestionConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(value);
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
int index, com.google.cloud.retail.v2beta.GenerativeQuestionConfig value) {
if (generativeQuestionConfigsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(index, value);
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder builderForValue) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(builderForValue.build());
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addGenerativeQuestionConfigs(
int index,
com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder builderForValue) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.add(index, builderForValue.build());
onChanged();
} else {
generativeQuestionConfigsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder addAllGenerativeQuestionConfigs(
java.lang.Iterable<? extends com.google.cloud.retail.v2beta.GenerativeQuestionConfig>
values) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, generativeQuestionConfigs_);
onChanged();
} else {
generativeQuestionConfigsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder clearGenerativeQuestionConfigs() {
if (generativeQuestionConfigsBuilder_ == null) {
generativeQuestionConfigs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
generativeQuestionConfigsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public Builder removeGenerativeQuestionConfigs(int index) {
if (generativeQuestionConfigsBuilder_ == null) {
ensureGenerativeQuestionConfigsIsMutable();
generativeQuestionConfigs_.remove(index);
onChanged();
} else {
generativeQuestionConfigsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder
getGenerativeQuestionConfigsBuilder(int index) {
return getGenerativeQuestionConfigsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder
getGenerativeQuestionConfigsOrBuilder(int index) {
if (generativeQuestionConfigsBuilder_ == null) {
return generativeQuestionConfigs_.get(index);
} else {
return generativeQuestionConfigsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public java.util.List<
? extends com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
getGenerativeQuestionConfigsOrBuilderList() {
if (generativeQuestionConfigsBuilder_ != null) {
return generativeQuestionConfigsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(generativeQuestionConfigs_);
}
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder
addGenerativeQuestionConfigsBuilder() {
return getGenerativeQuestionConfigsFieldBuilder()
.addBuilder(com.google.cloud.retail.v2beta.GenerativeQuestionConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder
addGenerativeQuestionConfigsBuilder(int index) {
return getGenerativeQuestionConfigsFieldBuilder()
.addBuilder(
index, com.google.cloud.retail.v2beta.GenerativeQuestionConfig.getDefaultInstance());
}
/**
*
*
* <pre>
* All the questions for a given catalog.
* </pre>
*
* <code>
* repeated .google.cloud.retail.v2beta.GenerativeQuestionConfig generative_question_configs = 1;
* </code>
*/
public java.util.List<com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder>
getGenerativeQuestionConfigsBuilderList() {
return getGenerativeQuestionConfigsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2beta.GenerativeQuestionConfig,
com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder,
com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>
getGenerativeQuestionConfigsFieldBuilder() {
if (generativeQuestionConfigsBuilder_ == null) {
generativeQuestionConfigsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.retail.v2beta.GenerativeQuestionConfig,
com.google.cloud.retail.v2beta.GenerativeQuestionConfig.Builder,
com.google.cloud.retail.v2beta.GenerativeQuestionConfigOrBuilder>(
generativeQuestionConfigs_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
generativeQuestionConfigs_ = null;
}
return generativeQuestionConfigsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse)
  // Shared immutable default (all-fields-unset) instance; also the prototype
  // backing newBuilder().
  private static final com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse();
  }
  /** Returns the singleton default instance of this message type. */
  public static com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and attaches the
  // partially-built message to any parse failure for diagnostics.
  private static final com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListGenerativeQuestionConfigsResponse>() {
        @java.lang.Override
        public ListGenerativeQuestionConfigsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers only see protobuf exceptions.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for this message type. */
  public static com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListGenerativeQuestionConfigsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.retail.v2beta.ListGenerativeQuestionConfigsResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/channel/v1/reports_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.channel.v1;
/**
*
*
* <pre>
* Response message for
* [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob].
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.RunReportJobResponse}
*/
@java.lang.Deprecated
public final class RunReportJobResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.channel.v1.RunReportJobResponse)
RunReportJobResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use RunReportJobResponse.newBuilder() to construct.
  private RunReportJobResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private RunReportJobResponse() {}
  // Runtime hook used by the protobuf library to create fresh instances
  // (e.g. during mutable copies); the parameter is intentionally unused.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new RunReportJobResponse();
  }
  /** Returns the descriptor for this message type, generated from reports_service.proto. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.channel.v1.ReportsServiceProto
        .internal_static_google_cloud_channel_v1_RunReportJobResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.channel.v1.ReportsServiceProto
        .internal_static_google_cloud_channel_v1_RunReportJobResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.channel.v1.RunReportJobResponse.class,
            com.google.cloud.channel.v1.RunReportJobResponse.Builder.class);
  }
  // Presence bits: bit 0 = report_job, bit 1 = report_metadata.
  private int bitField0_;
  public static final int REPORT_JOB_FIELD_NUMBER = 1;
  private com.google.cloud.channel.v1.ReportJob reportJob_;
  /**
   *
   *
   * <pre>
   * Pass `report_job.name` to
   * [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
   * to retrieve the report's results.
   * </pre>
   *
   * <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
   *
   * @return Whether the reportJob field is set.
   */
  @java.lang.Override
  public boolean hasReportJob() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Pass `report_job.name` to
   * [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
   * to retrieve the report's results.
   * </pre>
   *
   * <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
   *
   * @return The reportJob.
   */
  @java.lang.Override
  public com.google.cloud.channel.v1.ReportJob getReportJob() {
    // Falls back to the default instance when unset, so callers never see null.
    return reportJob_ == null
        ? com.google.cloud.channel.v1.ReportJob.getDefaultInstance()
        : reportJob_;
  }
  /**
   *
   *
   * <pre>
   * Pass `report_job.name` to
   * [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
   * to retrieve the report's results.
   * </pre>
   *
   * <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.channel.v1.ReportJobOrBuilder getReportJobOrBuilder() {
    return reportJob_ == null
        ? com.google.cloud.channel.v1.ReportJob.getDefaultInstance()
        : reportJob_;
  }
  public static final int REPORT_METADATA_FIELD_NUMBER = 2;
  private com.google.cloud.channel.v1.ReportResultsMetadata reportMetadata_;
  /**
   *
   *
   * <pre>
   * The metadata for the report's results (display name, columns, row count,
   * and date range). If you view this before the operation finishes,
   * you may see incomplete data.
   * </pre>
   *
   * <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
   *
   * @return Whether the reportMetadata field is set.
   */
  @java.lang.Override
  public boolean hasReportMetadata() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * The metadata for the report's results (display name, columns, row count,
   * and date range). If you view this before the operation finishes,
   * you may see incomplete data.
   * </pre>
   *
   * <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
   *
   * @return The reportMetadata.
   */
  @java.lang.Override
  public com.google.cloud.channel.v1.ReportResultsMetadata getReportMetadata() {
    // Falls back to the default instance when unset, so callers never see null.
    return reportMetadata_ == null
        ? com.google.cloud.channel.v1.ReportResultsMetadata.getDefaultInstance()
        : reportMetadata_;
  }
  /**
   *
   *
   * <pre>
   * The metadata for the report's results (display name, columns, row count,
   * and date range). If you view this before the operation finishes,
   * you may see incomplete data.
   * </pre>
   *
   * <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.channel.v1.ReportResultsMetadataOrBuilder getReportMetadataOrBuilder() {
    return reportMetadata_ == null
        ? com.google.cloud.channel.v1.ReportResultsMetadata.getDefaultInstance()
        : reportMetadata_;
  }
  // Memoized initialization check: -1 = unknown, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this proto3 message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize only the fields whose presence bits are set, in field-number order.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getReportJob());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getReportMetadata());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Cached after the first computation (-1 means not yet computed).
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getReportJob());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getReportMetadata());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.channel.v1.RunReportJobResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.channel.v1.RunReportJobResponse other =
        (com.google.cloud.channel.v1.RunReportJobResponse) obj;
    // Fields compare equal only when presence matches AND (if present) values match.
    if (hasReportJob() != other.hasReportJob()) return false;
    if (hasReportJob()) {
      if (!getReportJob().equals(other.getReportJob())) return false;
    }
    if (hasReportMetadata() != other.hasReportMetadata()) return false;
    if (hasReportMetadata()) {
      if (!getReportMetadata().equals(other.getReportMetadata())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Cached after first computation (0 means not yet computed).
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    // Only set fields contribute, keeping hashCode consistent with equals().
    if (hasReportJob()) {
      hash = (37 * hash) + REPORT_JOB_FIELD_NUMBER;
      hash = (53 * hash) + getReportJob().hashCode();
    }
    if (hasReportMetadata()) {
      hash = (37 * hash) + REPORT_METADATA_FIELD_NUMBER;
      hash = (53 * hash) + getReportMetadata().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: each overload delegates to PARSER
  // (or the GeneratedMessageV3 stream helpers) for a different input source.
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.channel.v1.RunReportJobResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  /** Returns a fresh builder seeded from the default instance. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated with {@code prototype}'s fields. */
  public static Builder newBuilder(com.google.cloud.channel.v1.RunReportJobResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the merge when this is the default instance (nothing to copy).
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [CloudChannelReportsService.RunReportJob][google.cloud.channel.v1.CloudChannelReportsService.RunReportJob].
* </pre>
*
* Protobuf type {@code google.cloud.channel.v1.RunReportJobResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.channel.v1.RunReportJobResponse)
com.google.cloud.channel.v1.RunReportJobResponseOrBuilder {
    /** Returns the descriptor for RunReportJobResponse (shared with the outer class). */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.channel.v1.ReportsServiceProto
          .internal_static_google_cloud_channel_v1_RunReportJobResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.channel.v1.ReportsServiceProto
          .internal_static_google_cloud_channel_v1_RunReportJobResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.channel.v1.RunReportJobResponse.class,
              com.google.cloud.channel.v1.RunReportJobResponse.Builder.class);
    }
    // Construct using com.google.cloud.channel.v1.RunReportJobResponse.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates the nested field builders when the runtime requests it
    // (alwaysUseFieldBuilders); otherwise they are created lazily on first use.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getReportJobFieldBuilder();
        getReportMetadataFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      // Release both message fields and dispose their nested builders, if any.
      reportJob_ = null;
      if (reportJobBuilder_ != null) {
        reportJobBuilder_.dispose();
        reportJobBuilder_ = null;
      }
      reportMetadata_ = null;
      if (reportMetadataBuilder_ != null) {
        reportMetadataBuilder_.dispose();
        reportMetadataBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.channel.v1.ReportsServiceProto
          .internal_static_google_cloud_channel_v1_RunReportJobResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.channel.v1.RunReportJobResponse getDefaultInstanceForType() {
      return com.google.cloud.channel.v1.RunReportJobResponse.getDefaultInstance();
    }
@java.lang.Override
public com.google.cloud.channel.v1.RunReportJobResponse build() {
com.google.cloud.channel.v1.RunReportJobResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.channel.v1.RunReportJobResponse buildPartial() {
com.google.cloud.channel.v1.RunReportJobResponse result =
new com.google.cloud.channel.v1.RunReportJobResponse(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.channel.v1.RunReportJobResponse result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.reportJob_ = reportJobBuilder_ == null ? reportJob_ : reportJobBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.reportMetadata_ =
reportMetadataBuilder_ == null ? reportMetadata_ : reportMetadataBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.channel.v1.RunReportJobResponse) {
return mergeFrom((com.google.cloud.channel.v1.RunReportJobResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.channel.v1.RunReportJobResponse other) {
if (other == com.google.cloud.channel.v1.RunReportJobResponse.getDefaultInstance())
return this;
if (other.hasReportJob()) {
mergeReportJob(other.getReportJob());
}
if (other.hasReportMetadata()) {
mergeReportMetadata(other.getReportMetadata());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getReportJobFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getReportMetadataFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.channel.v1.ReportJob reportJob_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.ReportJob,
com.google.cloud.channel.v1.ReportJob.Builder,
com.google.cloud.channel.v1.ReportJobOrBuilder>
reportJobBuilder_;
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*
* @return Whether the reportJob field is set.
*/
public boolean hasReportJob() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*
* @return The reportJob.
*/
public com.google.cloud.channel.v1.ReportJob getReportJob() {
if (reportJobBuilder_ == null) {
return reportJob_ == null
? com.google.cloud.channel.v1.ReportJob.getDefaultInstance()
: reportJob_;
} else {
return reportJobBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*/
public Builder setReportJob(com.google.cloud.channel.v1.ReportJob value) {
if (reportJobBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
reportJob_ = value;
} else {
reportJobBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*/
public Builder setReportJob(com.google.cloud.channel.v1.ReportJob.Builder builderForValue) {
if (reportJobBuilder_ == null) {
reportJob_ = builderForValue.build();
} else {
reportJobBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*/
public Builder mergeReportJob(com.google.cloud.channel.v1.ReportJob value) {
if (reportJobBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& reportJob_ != null
&& reportJob_ != com.google.cloud.channel.v1.ReportJob.getDefaultInstance()) {
getReportJobBuilder().mergeFrom(value);
} else {
reportJob_ = value;
}
} else {
reportJobBuilder_.mergeFrom(value);
}
if (reportJob_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*/
public Builder clearReportJob() {
bitField0_ = (bitField0_ & ~0x00000001);
reportJob_ = null;
if (reportJobBuilder_ != null) {
reportJobBuilder_.dispose();
reportJobBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*/
public com.google.cloud.channel.v1.ReportJob.Builder getReportJobBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getReportJobFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*/
public com.google.cloud.channel.v1.ReportJobOrBuilder getReportJobOrBuilder() {
if (reportJobBuilder_ != null) {
return reportJobBuilder_.getMessageOrBuilder();
} else {
return reportJob_ == null
? com.google.cloud.channel.v1.ReportJob.getDefaultInstance()
: reportJob_;
}
}
/**
*
*
* <pre>
* Pass `report_job.name` to
* [FetchReportResultsRequest.report_job][google.cloud.channel.v1.FetchReportResultsRequest.report_job]
* to retrieve the report's results.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportJob report_job = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.ReportJob,
com.google.cloud.channel.v1.ReportJob.Builder,
com.google.cloud.channel.v1.ReportJobOrBuilder>
getReportJobFieldBuilder() {
if (reportJobBuilder_ == null) {
reportJobBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.ReportJob,
com.google.cloud.channel.v1.ReportJob.Builder,
com.google.cloud.channel.v1.ReportJobOrBuilder>(
getReportJob(), getParentForChildren(), isClean());
reportJob_ = null;
}
return reportJobBuilder_;
}
private com.google.cloud.channel.v1.ReportResultsMetadata reportMetadata_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.ReportResultsMetadata,
com.google.cloud.channel.v1.ReportResultsMetadata.Builder,
com.google.cloud.channel.v1.ReportResultsMetadataOrBuilder>
reportMetadataBuilder_;
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*
* @return Whether the reportMetadata field is set.
*/
public boolean hasReportMetadata() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*
* @return The reportMetadata.
*/
public com.google.cloud.channel.v1.ReportResultsMetadata getReportMetadata() {
if (reportMetadataBuilder_ == null) {
return reportMetadata_ == null
? com.google.cloud.channel.v1.ReportResultsMetadata.getDefaultInstance()
: reportMetadata_;
} else {
return reportMetadataBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*/
public Builder setReportMetadata(com.google.cloud.channel.v1.ReportResultsMetadata value) {
if (reportMetadataBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
reportMetadata_ = value;
} else {
reportMetadataBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*/
public Builder setReportMetadata(
com.google.cloud.channel.v1.ReportResultsMetadata.Builder builderForValue) {
if (reportMetadataBuilder_ == null) {
reportMetadata_ = builderForValue.build();
} else {
reportMetadataBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*/
public Builder mergeReportMetadata(com.google.cloud.channel.v1.ReportResultsMetadata value) {
if (reportMetadataBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& reportMetadata_ != null
&& reportMetadata_
!= com.google.cloud.channel.v1.ReportResultsMetadata.getDefaultInstance()) {
getReportMetadataBuilder().mergeFrom(value);
} else {
reportMetadata_ = value;
}
} else {
reportMetadataBuilder_.mergeFrom(value);
}
if (reportMetadata_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*/
public Builder clearReportMetadata() {
bitField0_ = (bitField0_ & ~0x00000002);
reportMetadata_ = null;
if (reportMetadataBuilder_ != null) {
reportMetadataBuilder_.dispose();
reportMetadataBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*/
public com.google.cloud.channel.v1.ReportResultsMetadata.Builder getReportMetadataBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getReportMetadataFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*/
public com.google.cloud.channel.v1.ReportResultsMetadataOrBuilder getReportMetadataOrBuilder() {
if (reportMetadataBuilder_ != null) {
return reportMetadataBuilder_.getMessageOrBuilder();
} else {
return reportMetadata_ == null
? com.google.cloud.channel.v1.ReportResultsMetadata.getDefaultInstance()
: reportMetadata_;
}
}
/**
*
*
* <pre>
* The metadata for the report's results (display name, columns, row count,
* and date range). If you view this before the operation finishes,
* you may see incomplete data.
* </pre>
*
* <code>.google.cloud.channel.v1.ReportResultsMetadata report_metadata = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.ReportResultsMetadata,
com.google.cloud.channel.v1.ReportResultsMetadata.Builder,
com.google.cloud.channel.v1.ReportResultsMetadataOrBuilder>
getReportMetadataFieldBuilder() {
if (reportMetadataBuilder_ == null) {
reportMetadataBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.channel.v1.ReportResultsMetadata,
com.google.cloud.channel.v1.ReportResultsMetadata.Builder,
com.google.cloud.channel.v1.ReportResultsMetadataOrBuilder>(
getReportMetadata(), getParentForChildren(), isClean());
reportMetadata_ = null;
}
return reportMetadataBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.channel.v1.RunReportJobResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.channel.v1.RunReportJobResponse)
  // Shared singleton default (all-fields-unset) instance of this message.
  private static final com.google.cloud.channel.v1.RunReportJobResponse DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.channel.v1.RunReportJobResponse();
  }

  /** Returns the shared immutable default instance. */
  public static com.google.cloud.channel.v1.RunReportJobResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and, on any failure,
  // attaches the partially-built message to the thrown exception so callers
  // can inspect what was parsed before the error.
  private static final com.google.protobuf.Parser<RunReportJobResponse> PARSER =
      new com.google.protobuf.AbstractParser<RunReportJobResponse>() {
        @java.lang.Override
        public RunReportJobResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers uniformly see a protobuf parse error.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Static accessor for this message type's parser. */
  public static com.google.protobuf.Parser<RunReportJobResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<RunReportJobResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.channel.v1.RunReportJobResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/security/privateca/v1beta1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.security.privateca.v1beta1;
/**
*
*
* <pre>
* Request message for
* [CertificateAuthorityService.ScheduleDeleteCertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthorityService.ScheduleDeleteCertificateAuthority].
* </pre>
*
* Protobuf type {@code
* google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest}
*/
public final class ScheduleDeleteCertificateAuthorityRequest
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest)
ScheduleDeleteCertificateAuthorityRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ScheduleDeleteCertificateAuthorityRequest.newBuilder() to construct.
  private ScheduleDeleteCertificateAuthorityRequest(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the default instance; both string fields
  // default to empty.
  private ScheduleDeleteCertificateAuthorityRequest() {
    name_ = "";
    requestId_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ScheduleDeleteCertificateAuthorityRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.security.privateca.v1beta1.PrivateCaProto
        .internal_static_google_cloud_security_privateca_v1beta1_ScheduleDeleteCertificateAuthorityRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.security.privateca.v1beta1.PrivateCaProto
        .internal_static_google_cloud_security_privateca_v1beta1_ScheduleDeleteCertificateAuthorityRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
                .class,
            com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
                .Builder.class);
  }

  public static final int NAME_FIELD_NUMBER = 1;

  // Lazily-decoded string field: may hold a String or a ByteString; getName()
  // caches the decoded String, getNameBytes() caches the ByteString.
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";
  /**
   * <pre>
   * Required. The resource name for this [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority] in the
   * format `projects/&#42;/locations/&#42;/certificateAuthorities/&#42;`.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the raw wire bytes; decode as UTF-8 and cache.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes form of {@link #getName()}; the ByteString is cached after the
   * first call.
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int REQUEST_ID_FIELD_NUMBER = 2;

  // Lazily-decoded string field (same String/ByteString caching scheme as name_).
  @SuppressWarnings("serial")
  private volatile java.lang.Object requestId_ = "";

  /**
   * <pre>
   * Optional. An ID to identify requests. Specify a unique request ID so that if you must
   * retry your request, the server will know to ignore the request if it has
   * already been completed. The server will guarantee that for at least 60
   * minutes since the first request.
   *
   * For example, consider a situation where you make an initial request and
   * the request times out. If you make the request again with the same request
   * ID, the server can check if original operation with the same request ID
   * was received, and if so, will ignore the second request. This prevents
   * clients from accidentally creating duplicate commitments.
   *
   * The request ID must be a valid UUID with the exception that zero UUID is
   * not supported (00000000-0000-0000-0000-000000000000).
   * </pre>
   *
   * <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The requestId.
   */
  @java.lang.Override
  public java.lang.String getRequestId() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the raw wire bytes; decode as UTF-8 and cache.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      requestId_ = s;
      return s;
    }
  }

  /**
   * UTF-8 bytes form of {@link #getRequestId()}; the ByteString is cached after
   * the first call.
   *
   * @return The bytes for requestId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRequestIdBytes() {
    java.lang.Object ref = requestId_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      requestId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required proto2-style fields, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes to the wire format; proto3 scalar fields are omitted when they
  // hold the default (empty) value.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, requestId_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes in memoizedSize) the serialized byte size, mirroring
  // the field-skipping logic in writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, requestId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: compares both fields and the unknown-field set.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest other =
        (com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest) obj;

    if (!getName().equals(other.getName())) return false;
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(); memoized (0 is used as the "not yet
  // computed" sentinel, as in all generated protobuf messages).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard static parse entry points. All overloads delegate to PARSER (via
  // GeneratedMessageV3 helpers for stream inputs); the *Delimited* variants
  // read a varint length prefix before the message bytes.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Creates a new, empty builder. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Creates a new builder pre-populated with the contents of {@code prototype}. */
  public static Builder newBuilder(
      com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance yields an empty builder (nothing to merge); any other
  // instance is merged into a fresh builder.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [CertificateAuthorityService.ScheduleDeleteCertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthorityService.ScheduleDeleteCertificateAuthority].
* </pre>
*
* Protobuf type {@code
* google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest)
com.google.cloud.security.privateca.v1beta1
.ScheduleDeleteCertificateAuthorityRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.security.privateca.v1beta1.PrivateCaProto
          .internal_static_google_cloud_security_privateca_v1beta1_ScheduleDeleteCertificateAuthorityRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.security.privateca.v1beta1.PrivateCaProto
          .internal_static_google_cloud_security_privateca_v1beta1_ScheduleDeleteCertificateAuthorityRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
                  .class,
              com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
                  .Builder.class);
    }

    // Construct using
    // com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets both string fields to empty and clears the presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      requestId_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.security.privateca.v1beta1.PrivateCaProto
          .internal_static_google_cloud_security_privateca_v1beta1_ScheduleDeleteCertificateAuthorityRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
        getDefaultInstanceForType() {
      return com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
        build() {
      com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
        buildPartial() {
      com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest result =
          new com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest(
              this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose presence bit is set into the new message.
    private void buildPartial0(
        com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.requestId_ = requestId_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other
instanceof
com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest) {
return mergeFrom(
(com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest)
other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
other) {
if (other
== com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
.getDefaultInstance()) return this;
if (!other.getName().isEmpty()) {
name_ = other.name_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
name_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
requestId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The resource name for this [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority] in the
* format `projects/*/locations/*/certificateAuthorities/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name for this [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority] in the
* format `projects/*/locations/*/certificateAuthorities/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The resource name for this [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority] in the
* format `projects/*/locations/*/certificateAuthorities/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The name to set.
* @return This builder for chaining.
*/
public Builder setName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name for this [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority] in the
* format `projects/*/locations/*/certificateAuthorities/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearName() {
name_ = getDefaultInstance().getName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The resource name for this [CertificateAuthority][google.cloud.security.privateca.v1beta1.CertificateAuthority] in the
* format `projects/*/locations/*/certificateAuthorities/*`.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for name to set.
* @return This builder for chaining.
*/
public Builder setNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
name_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* Optional. An ID to identify requests. Specify a unique request ID so that if you must
* retry your request, the server will know to ignore the request if it has
* already been completed. The server will guarantee that for at least 60
* minutes since the first request.
*
* For example, consider a situation where you make an initial request and t
* he request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. An ID to identify requests. Specify a unique request ID so that if you must
* retry your request, the server will know to ignore the request if it has
* already been completed. The server will guarantee that for at least 60
* minutes since the first request.
*
* For example, consider a situation where you make an initial request and t
* he request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. An ID to identify requests. Specify a unique request ID so that if you must
* retry your request, the server will know to ignore the request if it has
* already been completed. The server will guarantee that for at least 60
* minutes since the first request.
*
* For example, consider a situation where you make an initial request and t
* he request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An ID to identify requests. Specify a unique request ID so that if you must
* retry your request, the server will know to ignore the request if it has
* already been completed. The server will guarantee that for at least 60
* minutes since the first request.
*
* For example, consider a situation where you make an initial request and t
* he request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. An ID to identify requests. Specify a unique request ID so that if you must
* retry your request, the server will know to ignore the request if it has
* already been completed. The server will guarantee that for at least 60
* minutes since the first request.
*
* For example, consider a situation where you make an initial request and t
* he request times out. If you make the request again with the same request
* ID, the server can check if original operation with the same request ID
* was received, and if so, will ignore the second request. This prevents
* clients from accidentally creating duplicate commitments.
*
* The request ID must be a valid UUID with the exception that zero UUID is
* not supported (00000000-0000-0000-0000-000000000000).
* </pre>
*
* <code>string request_id = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest)
  // Shared immutable default instance; used as the identity for merge/clear.
  private static final com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest();
  }

  public static com.google.cloud.security.privateca.v1beta1
          .ScheduleDeleteCertificateAuthorityRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Wire-format parser; parses through a fresh Builder and returns a possibly
  // partial message so errors can carry the bytes parsed so far.
  private static final com.google.protobuf.Parser<ScheduleDeleteCertificateAuthorityRequest>
      PARSER =
          new com.google.protobuf.AbstractParser<ScheduleDeleteCertificateAuthorityRequest>() {
            @java.lang.Override
            public ScheduleDeleteCertificateAuthorityRequest parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };

  public static com.google.protobuf.Parser<ScheduleDeleteCertificateAuthorityRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ScheduleDeleteCertificateAuthorityRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.security.privateca.v1beta1.ScheduleDeleteCertificateAuthorityRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,910 | java-kms/proto-google-cloud-kms-v1/src/main/java/com/google/cloud/kms/v1/ChecksummedData.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/kms/v1/resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.kms.v1;
/**
*
*
* <pre>
* Data with integrity verification field.
* </pre>
*
* Protobuf type {@code google.cloud.kms.v1.ChecksummedData}
*/
public final class ChecksummedData extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.kms.v1.ChecksummedData)
ChecksummedDataOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use ChecksummedData.newBuilder() to construct.
  private ChecksummedData(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private ChecksummedData() {
    data_ = com.google.protobuf.ByteString.EMPTY;
  }

  // Runtime hook used by the protobuf library to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ChecksummedData();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.kms.v1.KmsResourcesProto
        .internal_static_google_cloud_kms_v1_ChecksummedData_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.kms.v1.KmsResourcesProto
        .internal_static_google_cloud_kms_v1_ChecksummedData_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.kms.v1.ChecksummedData.class,
            com.google.cloud.kms.v1.ChecksummedData.Builder.class);
  }

  // Has-bits for message-typed fields (bit 0 = crc32c_checksum).
  private int bitField0_;

  public static final int DATA_FIELD_NUMBER = 3;
  private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
  /**
   *
   *
   * <pre>
   * Raw Data.
   * </pre>
   *
   * <code>bytes data = 3;</code>
   *
   * @return The data.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getData() {
    return data_;
  }

  public static final int CRC32C_CHECKSUM_FIELD_NUMBER = 2;
  // Nullable; null means the field is unset (see bitField0_).
  private com.google.protobuf.Int64Value crc32CChecksum_;
  /**
   *
   *
   * <pre>
   * Integrity verification field. A CRC32C
   * checksum of the returned
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data]. An
   * integrity check of
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data] can be
   * performed by computing the CRC32C checksum of
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data] and
   * comparing your results to this field. Discard the response in case of
   * non-matching checksum values, and perform a limited number of retries. A
   * persistent mismatch may indicate an issue in your computation of the CRC32C
   * checksum. Note: This field is defined as int64 for reasons of compatibility
   * across different languages. However, it is a non-negative integer, which
   * will never exceed `2^32-1`, and can be safely downconverted to uint32 in
   * languages that support this type.
   * </pre>
   *
   * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
   *
   * @return Whether the crc32cChecksum field is set.
   */
  @java.lang.Override
  public boolean hasCrc32CChecksum() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Integrity verification field. A CRC32C
   * checksum of the returned
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data]. An
   * integrity check of
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data] can be
   * performed by computing the CRC32C checksum of
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data] and
   * comparing your results to this field. Discard the response in case of
   * non-matching checksum values, and perform a limited number of retries. A
   * persistent mismatch may indicate an issue in your computation of the CRC32C
   * checksum. Note: This field is defined as int64 for reasons of compatibility
   * across different languages. However, it is a non-negative integer, which
   * will never exceed `2^32-1`, and can be safely downconverted to uint32 in
   * languages that support this type.
   * </pre>
   *
   * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
   *
   * @return The crc32cChecksum.
   */
  @java.lang.Override
  public com.google.protobuf.Int64Value getCrc32CChecksum() {
    return crc32CChecksum_ == null
        ? com.google.protobuf.Int64Value.getDefaultInstance()
        : crc32CChecksum_;
  }
  /**
   *
   *
   * <pre>
   * Integrity verification field. A CRC32C
   * checksum of the returned
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data]. An
   * integrity check of
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data] can be
   * performed by computing the CRC32C checksum of
   * [ChecksummedData.data][google.cloud.kms.v1.ChecksummedData.data] and
   * comparing your results to this field. Discard the response in case of
   * non-matching checksum values, and perform a limited number of retries. A
   * persistent mismatch may indicate an issue in your computation of the CRC32C
   * checksum. Note: This field is defined as int64 for reasons of compatibility
   * across different languages. However, it is a non-negative integer, which
   * will never exceed `2^32-1`, and can be safely downconverted to uint32 in
   * languages that support this type.
   * </pre>
   *
   * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.Int64ValueOrBuilder getCrc32CChecksumOrBuilder() {
    return crc32CChecksum_ == null
        ? com.google.protobuf.Int64Value.getDefaultInstance()
        : crc32CChecksum_;
  }

  // Memoized initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order: crc32c_checksum (2) before data (3).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getCrc32CChecksum());
    }
    if (!data_.isEmpty()) {
      output.writeBytes(3, data_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes (and memoizes) the serialized byte size; must mirror writeTo exactly.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCrc32CChecksum());
    }
    if (!data_.isEmpty()) {
      size += com.google.protobuf.CodedOutputStream.computeBytesSize(3, data_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  // Field-by-field equality, including presence of crc32c_checksum and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.kms.v1.ChecksummedData)) {
      return super.equals(obj);
    }
    com.google.cloud.kms.v1.ChecksummedData other = (com.google.cloud.kms.v1.ChecksummedData) obj;
    if (!getData().equals(other.getData())) return false;
    if (hasCrc32CChecksum() != other.hasCrc32CChecksum()) return false;
    if (hasCrc32CChecksum()) {
      if (!getCrc32CChecksum().equals(other.getCrc32CChecksum())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals (same fields, same presence checks).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DATA_FIELD_NUMBER;
    hash = (53 * hash) + getData().hashCode();
    if (hasCrc32CChecksum()) {
      hash = (37 * hash) + CRC32C_CHECKSUM_FIELD_NUMBER;
      hash = (53 * hash) + getCrc32CChecksum().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or the
  // GeneratedMessageV3 parse helpers.
  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.kms.v1.ChecksummedData parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.kms.v1.ChecksummedData parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.kms.v1.ChecksummedData prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Data with integrity verification field.
* </pre>
*
* Protobuf type {@code google.cloud.kms.v1.ChecksummedData}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.kms.v1.ChecksummedData)
com.google.cloud.kms.v1.ChecksummedDataOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.kms.v1.KmsResourcesProto
          .internal_static_google_cloud_kms_v1_ChecksummedData_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.kms.v1.KmsResourcesProto
          .internal_static_google_cloud_kms_v1_ChecksummedData_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.kms.v1.ChecksummedData.class,
              com.google.cloud.kms.v1.ChecksummedData.Builder.class);
    }

    // Construct using com.google.cloud.kms.v1.ChecksummedData.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates sub-message field builders when the runtime requires it
    // (only when alwaysUseFieldBuilders is set, e.g. for nested builders).
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getCrc32CChecksumFieldBuilder();
      }
    }
    // Resets every field to its default and disposes any sub-message builder.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      data_ = com.google.protobuf.ByteString.EMPTY;
      crc32CChecksum_ = null;
      if (crc32CChecksumBuilder_ != null) {
        crc32CChecksumBuilder_.dispose();
        crc32CChecksumBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.kms.v1.KmsResourcesProto
          .internal_static_google_cloud_kms_v1_ChecksummedData_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.kms.v1.ChecksummedData getDefaultInstanceForType() {
      return com.google.cloud.kms.v1.ChecksummedData.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.kms.v1.ChecksummedData build() {
      com.google.cloud.kms.v1.ChecksummedData result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.kms.v1.ChecksummedData buildPartial() {
      com.google.cloud.kms.v1.ChecksummedData result =
          new com.google.cloud.kms.v1.ChecksummedData(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the message and translates the builder's has-bits
    // (bit 0 = data, bit 1 = crc32c_checksum) into the message's has-bits.
    private void buildPartial0(com.google.cloud.kms.v1.ChecksummedData result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.data_ = data_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.crc32CChecksum_ =
            crc32CChecksumBuilder_ == null ? crc32CChecksum_ : crc32CChecksumBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.kms.v1.ChecksummedData) {
return mergeFrom((com.google.cloud.kms.v1.ChecksummedData) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.kms.v1.ChecksummedData other) {
if (other == com.google.cloud.kms.v1.ChecksummedData.getDefaultInstance()) return this;
if (other.getData() != com.google.protobuf.ByteString.EMPTY) {
setData(other.getData());
}
if (other.hasCrc32CChecksum()) {
mergeCrc32CChecksum(other.getCrc32CChecksum());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // ChecksummedData has no required fields, so it is always initialized.
  return true;
}
// Wire-format parse loop. Tag 18 = field 2 (crc32c_checksum, length-delimited
// message); tag 26 = field 3 (data, bytes). Unknown tags go to the
// unknown-field set; tag 0 means end of stream.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 18:
          {
            input.readMessage(getCrc32CChecksumFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        case 26:
          {
            data_ = input.readBytes();
            bitField0_ |= 0x00000001;
            break;
          } // case 26
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially merged state is observed.
    onChanged();
  } // finally
  return this;
}
// Presence bits for this builder's fields (bit 0x01 = data, 0x02 = crc32c).
private int bitField0_;
// Backing storage for `bytes data = 3`; EMPTY is the proto3 default.
private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
/**
 * Raw Data.
 *
 * <code>bytes data = 3;</code>
 *
 * @return The data.
 */
@java.lang.Override
public com.google.protobuf.ByteString getData() {
  return data_;
}
/**
 * Raw Data.
 *
 * <code>bytes data = 3;</code>
 *
 * @param value The data to set; must not be null (use EMPTY for "no data").
 * @return This builder for chaining.
 */
public Builder setData(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  data_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Raw Data.
 *
 * <code>bytes data = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearData() {
  // Drop the presence bit and restore the default (EMPTY) value.
  bitField0_ = (bitField0_ & ~0x00000001);
  data_ = getDefaultInstance().getData();
  onChanged();
  return this;
}
// Message-typed field state: either the plain message (`crc32CChecksum_`) or,
// once a sub-builder has been requested, the SingleFieldBuilderV3 wrapper.
// At most one of the two is authoritative at any time.
private com.google.protobuf.Int64Value crc32CChecksum_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Int64Value,
        com.google.protobuf.Int64Value.Builder,
        com.google.protobuf.Int64ValueOrBuilder>
    crc32CChecksumBuilder_;
/**
 * Integrity verification field: a CRC32C checksum of {@code data}. Declared as
 * {@code Int64Value} for cross-language compatibility, but the value never
 * exceeds {@code 2^32-1}.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 *
 * @return Whether the crc32cChecksum field is set.
 */
public boolean hasCrc32CChecksum() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 * Integrity verification field: a CRC32C checksum of {@code data}.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 *
 * @return The crc32cChecksum, or the default instance if unset.
 */
public com.google.protobuf.Int64Value getCrc32CChecksum() {
  if (crc32CChecksumBuilder_ == null) {
    return crc32CChecksum_ == null
        ? com.google.protobuf.Int64Value.getDefaultInstance()
        : crc32CChecksum_;
  } else {
    // The sub-builder owns the field once it has been created.
    return crc32CChecksumBuilder_.getMessage();
  }
}
/**
 * Integrity verification field: a CRC32C checksum of {@code data}.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 */
public Builder setCrc32CChecksum(com.google.protobuf.Int64Value value) {
  if (crc32CChecksumBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    crc32CChecksum_ = value;
  } else {
    crc32CChecksumBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Integrity verification field: a CRC32C checksum of {@code data}.
 * Builder-accepting overload; the builder is built immediately.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 */
public Builder setCrc32CChecksum(com.google.protobuf.Int64Value.Builder builderForValue) {
  if (crc32CChecksumBuilder_ == null) {
    crc32CChecksum_ = builderForValue.build();
  } else {
    crc32CChecksumBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Integrity verification field: a CRC32C checksum of {@code data}.
 * Merges {@code value} into any existing non-default value; otherwise
 * replaces the field outright.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 */
public Builder mergeCrc32CChecksum(com.google.protobuf.Int64Value value) {
  if (crc32CChecksumBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0)
        && crc32CChecksum_ != null
        // Identity check against the default-instance singleton: only merge
        // into a value that was actually set to something non-default.
        && crc32CChecksum_ != com.google.protobuf.Int64Value.getDefaultInstance()) {
      getCrc32CChecksumBuilder().mergeFrom(value);
    } else {
      crc32CChecksum_ = value;
    }
  } else {
    crc32CChecksumBuilder_.mergeFrom(value);
  }
  if (crc32CChecksum_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 * Integrity verification field: a CRC32C checksum of {@code data}.
 * Clears the field and releases the sub-builder, if any.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 */
public Builder clearCrc32CChecksum() {
  bitField0_ = (bitField0_ & ~0x00000002);
  crc32CChecksum_ = null;
  if (crc32CChecksumBuilder_ != null) {
    crc32CChecksumBuilder_.dispose();
    crc32CChecksumBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 * Integrity verification field: a CRC32C checksum of {@code data}.
 * Returns a mutable sub-builder for the field, marking it as set.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 */
public com.google.protobuf.Int64Value.Builder getCrc32CChecksumBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getCrc32CChecksumFieldBuilder().getBuilder();
}
/**
 * Integrity verification field: a CRC32C checksum of {@code data}.
 * Read-only view that avoids forcing creation of a sub-builder.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 */
public com.google.protobuf.Int64ValueOrBuilder getCrc32CChecksumOrBuilder() {
  if (crc32CChecksumBuilder_ != null) {
    return crc32CChecksumBuilder_.getMessageOrBuilder();
  } else {
    return crc32CChecksum_ == null
        ? com.google.protobuf.Int64Value.getDefaultInstance()
        : crc32CChecksum_;
  }
}
/**
 * Lazily creates the SingleFieldBuilderV3 for {@code crc32c_checksum}. After
 * creation the builder owns the field state, so the plain message reference is
 * nulled out.
 *
 * <code>.google.protobuf.Int64Value crc32c_checksum = 2;</code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Int64Value,
        com.google.protobuf.Int64Value.Builder,
        com.google.protobuf.Int64ValueOrBuilder>
    getCrc32CChecksumFieldBuilder() {
  if (crc32CChecksumBuilder_ == null) {
    crc32CChecksumBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Int64Value,
            com.google.protobuf.Int64Value.Builder,
            com.google.protobuf.Int64ValueOrBuilder>(
            getCrc32CChecksum(), getParentForChildren(), isClean());
    crc32CChecksum_ = null;
  }
  return crc32CChecksumBuilder_;
}
// Unknown-field handling: both simply delegate to the superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.kms.v1.ChecksummedData)
}
// @@protoc_insertion_point(class_scope:google.cloud.kms.v1.ChecksummedData)
// Singleton default instance, created eagerly at class-initialization time.
private static final com.google.cloud.kms.v1.ChecksummedData DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.kms.v1.ChecksummedData();
}

public static com.google.cloud.kms.v1.ChecksummedData getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Shared parser. Exceptions are normalized to InvalidProtocolBufferException
// carrying the partially-parsed message so callers can inspect it.
private static final com.google.protobuf.Parser<ChecksummedData> PARSER =
    new com.google.protobuf.AbstractParser<ChecksummedData>() {
      @java.lang.Override
      public ChecksummedData parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<ChecksummedData> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ChecksummedData> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.kms.v1.ChecksummedData getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== File boundary (dataset-join artifact) ====
// The content below belongs to a separate generated source file:
//   googleapis/google-cloud-java:
//   java-vision/proto-google-cloud-vision-v1/src/main/java/com/google/cloud/vision/v1/ListProductSetsResponse.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1/product_search_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vision.v1;
/**
*
*
* <pre>
* Response message for the `ListProductSets` method.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1.ListProductSetsResponse}
*/
public final class ListProductSetsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1.ListProductSetsResponse)
ListProductSetsResponseOrBuilder {
private static final long serialVersionUID = 0L;

// Use ListProductSetsResponse.newBuilder() to construct.
private ListProductSetsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor initializes repeated/string fields to their defaults.
private ListProductSetsResponse() {
  productSets_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

// Reflection hook used by the runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new ListProductSetsResponse();
}
// Descriptor plumbing linking this class to the generated proto metadata.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ListProductSetsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ListProductSetsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vision.v1.ListProductSetsResponse.class,
          com.google.cloud.vision.v1.ListProductSetsResponse.Builder.class);
}
public static final int PRODUCT_SETS_FIELD_NUMBER = 1;

// Immutable once built; messages are serialization-safe despite List not
// implementing Serializable, hence the suppression.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.vision.v1.ProductSet> productSets_;

/**
 * List of ProductSets.
 *
 * <code>repeated .google.cloud.vision.v1.ProductSet product_sets = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.vision.v1.ProductSet> getProductSetsList() {
  return productSets_;
}

/**
 * List of ProductSets.
 *
 * <code>repeated .google.cloud.vision.v1.ProductSet product_sets = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.vision.v1.ProductSetOrBuilder>
    getProductSetsOrBuilderList() {
  return productSets_;
}

/**
 * List of ProductSets.
 *
 * <code>repeated .google.cloud.vision.v1.ProductSet product_sets = 1;</code>
 */
@java.lang.Override
public int getProductSetsCount() {
  return productSets_.size();
}

/**
 * List of ProductSets.
 *
 * <code>repeated .google.cloud.vision.v1.ProductSet product_sets = 1;</code>
 */
@java.lang.Override
public com.google.cloud.vision.v1.ProductSet getProductSets(int index) {
  return productSets_.get(index);
}

/**
 * List of ProductSets.
 *
 * <code>repeated .google.cloud.vision.v1.ProductSet product_sets = 1;</code>
 */
@java.lang.Override
public com.google.cloud.vision.v1.ProductSetOrBuilder getProductSetsOrBuilder(int index) {
  return productSets_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Holds either a String or a ByteString; lazily converted and cached in
// whichever form is requested (standard generated string-field pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 * Token to retrieve the next page of results, or empty if there are no more
 * results in the list.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded form for subsequent calls.
    nextPageToken_ = s;
    return s;
  }
}

/**
 * Token to retrieve the next page of results, or empty if there are no more
 * results in the list.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded form for subsequent calls.
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
// Serializes fields in field-number order: repeated product_sets (1), then
// next_page_token (2) if non-empty, then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  for (int i = 0; i < productSets_.size(); i++) {
    output.writeMessage(1, productSets_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the wire size; must mirror writeTo exactly.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < productSets_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, productSets_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all fields, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.vision.v1.ListProductSetsResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.vision.v1.ListProductSetsResponse other =
      (com.google.cloud.vision.v1.ListProductSetsResponse) obj;

  if (!getProductSetsList().equals(other.getProductSetsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash consistent with equals(); mixes descriptor, set fields and
// unknown fields using the standard generated 19/37/53/29 multipliers.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getProductSetsCount() > 0) {
    hash = (37 * hash) + PRODUCT_SETS_FIELD_NUMBER;
    hash = (53 * hash) + getProductSetsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points for every supported input form
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an extension registry; stream variants route IOExceptions
// through the GeneratedMessageV3 helpers.
public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.vision.v1.ListProductSetsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Builder factories. toBuilder() on the default instance returns a fresh
// Builder to avoid mutating the shared singleton's state.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.vision.v1.ListProductSetsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Response message for the `ListProductSets` method.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1.ListProductSetsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.vision.v1.ListProductSetsResponse)
com.google.cloud.vision.v1.ListProductSetsResponseOrBuilder {
// Builder-side descriptor plumbing and constructors.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ListProductSetsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ListProductSetsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.vision.v1.ListProductSetsResponse.class,
          com.google.cloud.vision.v1.ListProductSetsResponse.Builder.class);
}

// Construct using com.google.cloud.vision.v1.ListProductSetsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
// Resets all fields and presence bits to their defaults.
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (productSetsBuilder_ == null) {
    productSets_ = java.util.Collections.emptyList();
  } else {
    productSets_ = null;
    productSetsBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.vision.v1.ProductSearchServiceProto
      .internal_static_google_cloud_vision_v1_ListProductSetsResponse_descriptor;
}

@java.lang.Override
public com.google.cloud.vision.v1.ListProductSetsResponse getDefaultInstanceForType() {
  return com.google.cloud.vision.v1.ListProductSetsResponse.getDefaultInstance();
}
// build() enforces the initialization contract; buildPartial() does not.
@java.lang.Override
public com.google.cloud.vision.v1.ListProductSetsResponse build() {
  com.google.cloud.vision.v1.ListProductSetsResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.vision.v1.ListProductSetsResponse buildPartial() {
  com.google.cloud.vision.v1.ListProductSetsResponse result =
      new com.google.cloud.vision.v1.ListProductSetsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}
// Transfers the repeated product_sets field. When no sub-builder exists the
// backing list is frozen (made unmodifiable) and handed to the message;
// otherwise the RepeatedFieldBuilderV3 produces the immutable list.
private void buildPartialRepeatedFields(
    com.google.cloud.vision.v1.ListProductSetsResponse result) {
  if (productSetsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      productSets_ = java.util.Collections.unmodifiableList(productSets_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.productSets_ = productSets_;
  } else {
    result.productSets_ = productSetsBuilder_.build();
  }
}

// Transfers the scalar next_page_token field if it was explicitly set.
private void buildPartial0(com.google.cloud.vision.v1.ListProductSetsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
// Reflective (descriptor-based) mutators; all delegate to the superclass.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Dispatches to the type-specific merge when possible.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.vision.v1.ListProductSetsResponse) {
    return mergeFrom((com.google.cloud.vision.v1.ListProductSetsResponse) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
// Appends other's product_sets and overwrites next_page_token if non-empty.
// The repeated field takes two paths depending on whether a sub-builder has
// been created; when our side is empty we can share other's immutable list.
public Builder mergeFrom(com.google.cloud.vision.v1.ListProductSetsResponse other) {
  if (other == com.google.cloud.vision.v1.ListProductSetsResponse.getDefaultInstance())
    return this;
  if (productSetsBuilder_ == null) {
    if (!other.productSets_.isEmpty()) {
      if (productSets_.isEmpty()) {
        productSets_ = other.productSets_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureProductSetsIsMutable();
        productSets_.addAll(other.productSets_);
      }
      onChanged();
    }
  } else {
    if (!other.productSets_.isEmpty()) {
      if (productSetsBuilder_.isEmpty()) {
        // Adopt other's list directly; recreate the builder lazily only if
        // the runtime always routes repeated access through field builders.
        productSetsBuilder_.dispose();
        productSetsBuilder_ = null;
        productSets_ = other.productSets_;
        bitField0_ = (bitField0_ & ~0x00000001);
        productSetsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getProductSetsFieldBuilder()
                : null;
      } else {
        productSetsBuilder_.addAllMessages(other.productSets_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // No required fields, so the message is always initialized.
  return true;
}
// Wire-format parse loop. Tag 10 = field 1 (repeated product_sets message);
// tag 18 = field 2 (next_page_token string). Unknown tags are preserved.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            com.google.cloud.vision.v1.ProductSet m =
                input.readMessage(
                    com.google.cloud.vision.v1.ProductSet.parser(), extensionRegistry);
            if (productSetsBuilder_ == null) {
              ensureProductSetsIsMutable();
              productSets_.add(m);
            } else {
              productSetsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially merged state is observed.
    onChanged();
  } // finally
  return this;
}
    // Presence/ownership bits: bit 0 = productSets_ list is privately owned
    // (mutable), bit 1 = next_page_token has been set.
    private int bitField0_;
    private java.util.List<com.google.cloud.vision.v1.ProductSet> productSets_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces the (possibly shared/immutable) list with a
    // private ArrayList the first time a mutation is requested.
    private void ensureProductSetsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        productSets_ = new java.util.ArrayList<com.google.cloud.vision.v1.ProductSet>(productSets_);
        bitField0_ |= 0x00000001;
      }
    }
    // Lazily created nested-builder support for product_sets; when non-null it
    // owns the repeated field and productSets_ is ignored (see
    // getProductSetsFieldBuilder()).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.vision.v1.ProductSet,
            com.google.cloud.vision.v1.ProductSet.Builder,
            com.google.cloud.vision.v1.ProductSetOrBuilder>
        productSetsBuilder_;
    /**
     * Returns an unmodifiable view of the product_sets list
     * (<code>repeated .google.cloud.vision.v1.ProductSet product_sets = 1;</code>).
     */
    public java.util.List<com.google.cloud.vision.v1.ProductSet> getProductSetsList() {
      if (productSetsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(productSets_);
      } else {
        return productSetsBuilder_.getMessageList();
      }
    }
    /** Returns the number of product_sets entries. */
    public int getProductSetsCount() {
      if (productSetsBuilder_ == null) {
        return productSets_.size();
      } else {
        return productSetsBuilder_.getCount();
      }
    }
    /** Returns the product_sets entry at {@code index}. */
    public com.google.cloud.vision.v1.ProductSet getProductSets(int index) {
      if (productSetsBuilder_ == null) {
        return productSets_.get(index);
      } else {
        return productSetsBuilder_.getMessage(index);
      }
    }
    /** Replaces the product_sets entry at {@code index}; {@code value} must be non-null. */
    public Builder setProductSets(int index, com.google.cloud.vision.v1.ProductSet value) {
      if (productSetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureProductSetsIsMutable();
        productSets_.set(index, value);
        onChanged();
      } else {
        productSetsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /** Replaces the product_sets entry at {@code index} with {@code builderForValue.build()}. */
    public Builder setProductSets(
        int index, com.google.cloud.vision.v1.ProductSet.Builder builderForValue) {
      if (productSetsBuilder_ == null) {
        ensureProductSetsIsMutable();
        productSets_.set(index, builderForValue.build());
        onChanged();
      } else {
        productSetsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /** Appends {@code value} (non-null) to product_sets. */
    public Builder addProductSets(com.google.cloud.vision.v1.ProductSet value) {
      if (productSetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureProductSetsIsMutable();
        productSets_.add(value);
        onChanged();
      } else {
        productSetsBuilder_.addMessage(value);
      }
      return this;
    }
    /** Inserts {@code value} (non-null) into product_sets at {@code index}. */
    public Builder addProductSets(int index, com.google.cloud.vision.v1.ProductSet value) {
      if (productSetsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureProductSetsIsMutable();
        productSets_.add(index, value);
        onChanged();
      } else {
        productSetsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /** Appends {@code builderForValue.build()} to product_sets. */
    public Builder addProductSets(com.google.cloud.vision.v1.ProductSet.Builder builderForValue) {
      if (productSetsBuilder_ == null) {
        ensureProductSetsIsMutable();
        productSets_.add(builderForValue.build());
        onChanged();
      } else {
        productSetsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /** Inserts {@code builderForValue.build()} into product_sets at {@code index}. */
    public Builder addProductSets(
        int index, com.google.cloud.vision.v1.ProductSet.Builder builderForValue) {
      if (productSetsBuilder_ == null) {
        ensureProductSetsIsMutable();
        productSets_.add(index, builderForValue.build());
        onChanged();
      } else {
        productSetsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /** Appends every element of {@code values} to product_sets. */
    public Builder addAllProductSets(
        java.lang.Iterable<? extends com.google.cloud.vision.v1.ProductSet> values) {
      if (productSetsBuilder_ == null) {
        ensureProductSetsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, productSets_);
        onChanged();
      } else {
        productSetsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /** Removes all product_sets entries. */
    public Builder clearProductSets() {
      if (productSetsBuilder_ == null) {
        productSets_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        productSetsBuilder_.clear();
      }
      return this;
    }
    /** Removes the product_sets entry at {@code index}. */
    public Builder removeProductSets(int index) {
      if (productSetsBuilder_ == null) {
        ensureProductSetsIsMutable();
        productSets_.remove(index);
        onChanged();
      } else {
        productSetsBuilder_.remove(index);
      }
      return this;
    }
    /** Returns a mutable builder for the entry at {@code index}; switches to builder-backed mode. */
    public com.google.cloud.vision.v1.ProductSet.Builder getProductSetsBuilder(int index) {
      return getProductSetsFieldBuilder().getBuilder(index);
    }
    /** Returns the entry at {@code index} as a message-or-builder view. */
    public com.google.cloud.vision.v1.ProductSetOrBuilder getProductSetsOrBuilder(int index) {
      if (productSetsBuilder_ == null) {
        return productSets_.get(index);
      } else {
        return productSetsBuilder_.getMessageOrBuilder(index);
      }
    }
    /** Returns an unmodifiable message-or-builder view of product_sets. */
    public java.util.List<? extends com.google.cloud.vision.v1.ProductSetOrBuilder>
        getProductSetsOrBuilderList() {
      if (productSetsBuilder_ != null) {
        return productSetsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(productSets_);
      }
    }
    /** Appends a default-valued entry and returns its builder; switches to builder-backed mode. */
    public com.google.cloud.vision.v1.ProductSet.Builder addProductSetsBuilder() {
      return getProductSetsFieldBuilder()
          .addBuilder(com.google.cloud.vision.v1.ProductSet.getDefaultInstance());
    }
    /** Inserts a default-valued entry at {@code index} and returns its builder. */
    public com.google.cloud.vision.v1.ProductSet.Builder addProductSetsBuilder(int index) {
      return getProductSetsFieldBuilder()
          .addBuilder(index, com.google.cloud.vision.v1.ProductSet.getDefaultInstance());
    }
    /** Returns builders for every product_sets entry; switches to builder-backed mode. */
    public java.util.List<com.google.cloud.vision.v1.ProductSet.Builder>
        getProductSetsBuilderList() {
      return getProductSetsFieldBuilder().getBuilderList();
    }
    // Lazily creates the RepeatedFieldBuilderV3 and hands the current list over
    // to it; afterwards productSets_ is null and the builder owns the field.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.vision.v1.ProductSet,
            com.google.cloud.vision.v1.ProductSet.Builder,
            com.google.cloud.vision.v1.ProductSetOrBuilder>
        getProductSetsFieldBuilder() {
      if (productSetsBuilder_ == null) {
        productSetsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.vision.v1.ProductSet,
                com.google.cloud.vision.v1.ProductSet.Builder,
                com.google.cloud.vision.v1.ProductSetOrBuilder>(
                productSets_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        productSets_ = null;
      }
      return productSetsBuilder_;
    }
    // Holds either a String (once decoded) or a ByteString straight off the wire.
    private java.lang.Object nextPageToken_ = "";
    /**
     * Token to retrieve the next page of results, or empty if there are no more
     * results in the list (<code>string next_page_token = 2;</code>).
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the ByteString once and cache the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * Returns nextPageToken as UTF-8 bytes, caching the ByteString form.
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * Sets nextPageToken.
     *
     * @param value The nextPageToken to set; must be non-null.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Resets nextPageToken to its default (empty) value.
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * Sets nextPageToken from raw bytes, which must be valid UTF-8.
     *
     * @param value The bytes for nextPageToken to set; must be non-null.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Reflective unknown-field accessors; delegate to the generated base class.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.vision.v1.ListProductSetsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1.ListProductSetsResponse)
  // Shared immutable default instance; safe to reuse because generated
  // messages are immutable once built.
  private static final com.google.cloud.vision.v1.ListProductSetsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1.ListProductSetsResponse();
  }
  public static com.google.cloud.vision.v1.ListProductSetsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that funnels wire data through the builder's mergeFrom, attaching
  // the partially built message to any parse exception so callers can inspect
  // what was read before the failure.
  private static final com.google.protobuf.Parser<ListProductSetsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListProductSetsResponse>() {
        @java.lang.Override
        public ListProductSetsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListProductSetsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListProductSetsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.vision.v1.ListProductSetsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/hive | 35,814 | standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/WMNullablePool.java | /**
* Autogenerated by Thrift Compiler (0.16.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.hadoop.hive.metastore.api;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)")
@org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class WMNullablePool implements org.apache.thrift.TBase<WMNullablePool, WMNullablePool._Fields>, java.io.Serializable, Cloneable, Comparable<WMNullablePool> {
  // Thrift metadata: struct descriptor plus one field descriptor per field
  // (name, wire type, field id) used by the serialization schemes below.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("WMNullablePool");
  private static final org.apache.thrift.protocol.TField RESOURCE_PLAN_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("resourcePlanName", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.protocol.TField POOL_PATH_FIELD_DESC = new org.apache.thrift.protocol.TField("poolPath", org.apache.thrift.protocol.TType.STRING, (short)2);
  private static final org.apache.thrift.protocol.TField ALLOC_FRACTION_FIELD_DESC = new org.apache.thrift.protocol.TField("allocFraction", org.apache.thrift.protocol.TType.DOUBLE, (short)3);
  private static final org.apache.thrift.protocol.TField QUERY_PARALLELISM_FIELD_DESC = new org.apache.thrift.protocol.TField("queryParallelism", org.apache.thrift.protocol.TType.I32, (short)4);
  private static final org.apache.thrift.protocol.TField SCHEDULING_POLICY_FIELD_DESC = new org.apache.thrift.protocol.TField("schedulingPolicy", org.apache.thrift.protocol.TType.STRING, (short)5);
  private static final org.apache.thrift.protocol.TField IS_SET_SCHEDULING_POLICY_FIELD_DESC = new org.apache.thrift.protocol.TField("isSetSchedulingPolicy", org.apache.thrift.protocol.TType.BOOL, (short)6);
  private static final org.apache.thrift.protocol.TField NS_FIELD_DESC = new org.apache.thrift.protocol.TField("ns", org.apache.thrift.protocol.TType.STRING, (short)7);
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new WMNullablePoolStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new WMNullablePoolTupleSchemeFactory();
  // Struct fields. Nullable object fields signal "unset" via null; primitive
  // fields track presence via __isset_bitfield (declared further down).
  private @org.apache.thrift.annotation.Nullable java.lang.String resourcePlanName; // required
  private @org.apache.thrift.annotation.Nullable java.lang.String poolPath; // required
  private double allocFraction; // optional
  private int queryParallelism; // optional
  private @org.apache.thrift.annotation.Nullable java.lang.String schedulingPolicy; // optional
  private boolean isSetSchedulingPolicy; // optional
  private @org.apache.thrift.annotation.Nullable java.lang.String ns; // optional
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    RESOURCE_PLAN_NAME((short)1, "resourcePlanName"),
    POOL_PATH((short)2, "poolPath"),
    ALLOC_FRACTION((short)3, "allocFraction"),
    QUERY_PARALLELISM((short)4, "queryParallelism"),
    SCHEDULING_POLICY((short)5, "schedulingPolicy"),
    IS_SET_SCHEDULING_POLICY((short)6, "isSetSchedulingPolicy"),
    NS((short)7, "ns");
    // Lookup table from field name to enum constant, built once at class load.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // RESOURCE_PLAN_NAME
          return RESOURCE_PLAN_NAME;
        case 2: // POOL_PATH
          return POOL_PATH;
        case 3: // ALLOC_FRACTION
          return ALLOC_FRACTION;
        case 4: // QUERY_PARALLELISM
          return QUERY_PARALLELISM;
        case 5: // SCHEDULING_POLICY
          return SCHEDULING_POLICY;
        case 6: // IS_SET_SCHEDULING_POLICY
          return IS_SET_SCHEDULING_POLICY;
        case 7: // NS
          return NS;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final java.lang.String _fieldName;
    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments: bit positions in __isset_bitfield tracking presence
  // of the primitive (non-nullable) optional fields.
  private static final int __ALLOCFRACTION_ISSET_ID = 0;
  private static final int __QUERYPARALLELISM_ISSET_ID = 1;
  private static final int __ISSETSCHEDULINGPOLICY_ISSET_ID = 2;
  private byte __isset_bitfield = 0;
  private static final _Fields optionals[] = {_Fields.ALLOC_FRACTION,_Fields.QUERY_PARALLELISM,_Fields.SCHEDULING_POLICY,_Fields.IS_SET_SCHEDULING_POLICY,_Fields.NS};
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    // Per-field metadata (requiredness + value type) registered with the
    // Thrift runtime for reflective introspection.
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.RESOURCE_PLAN_NAME, new org.apache.thrift.meta_data.FieldMetaData("resourcePlanName", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.POOL_PATH, new org.apache.thrift.meta_data.FieldMetaData("poolPath", org.apache.thrift.TFieldRequirementType.REQUIRED,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.ALLOC_FRACTION, new org.apache.thrift.meta_data.FieldMetaData("allocFraction", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE)));
    tmpMap.put(_Fields.QUERY_PARALLELISM, new org.apache.thrift.meta_data.FieldMetaData("queryParallelism", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
    tmpMap.put(_Fields.SCHEDULING_POLICY, new org.apache.thrift.meta_data.FieldMetaData("schedulingPolicy", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    tmpMap.put(_Fields.IS_SET_SCHEDULING_POLICY, new org.apache.thrift.meta_data.FieldMetaData("isSetSchedulingPolicy", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.NS, new org.apache.thrift.meta_data.FieldMetaData("ns", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(WMNullablePool.class, metaDataMap);
  }
  /** Default constructor; all fields left unset. */
  public WMNullablePool() {
  }
  /** Convenience constructor for the two required fields. */
  public WMNullablePool(
    java.lang.String resourcePlanName,
    java.lang.String poolPath)
  {
    this();
    this.resourcePlanName = resourcePlanName;
    this.poolPath = poolPath;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public WMNullablePool(WMNullablePool other) {
    // Strings are immutable, so copying references is a sufficient deep copy;
    // the isset bitfield is copied wholesale for the primitive fields.
    __isset_bitfield = other.__isset_bitfield;
    if (other.isSetResourcePlanName()) {
      this.resourcePlanName = other.resourcePlanName;
    }
    if (other.isSetPoolPath()) {
      this.poolPath = other.poolPath;
    }
    this.allocFraction = other.allocFraction;
    this.queryParallelism = other.queryParallelism;
    if (other.isSetSchedulingPolicy()) {
      this.schedulingPolicy = other.schedulingPolicy;
    }
    this.isSetSchedulingPolicy = other.isSetSchedulingPolicy;
    if (other.isSetNs()) {
      this.ns = other.ns;
    }
  }
  public WMNullablePool deepCopy() {
    return new WMNullablePool(this);
  }
  @Override
  public void clear() {
    // Resets every field to its unset/default state.
    this.resourcePlanName = null;
    this.poolPath = null;
    setAllocFractionIsSet(false);
    this.allocFraction = 0.0;
    setQueryParallelismIsSet(false);
    this.queryParallelism = 0;
    this.schedulingPolicy = null;
    setIsSetSchedulingPolicyIsSet(false);
    this.isSetSchedulingPolicy = false;
    this.ns = null;
  }
  // --- Per-field accessors. Object fields use null as the unset marker;
  // --- primitive fields use __isset_bitfield via EncodingUtils.
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getResourcePlanName() {
    return this.resourcePlanName;
  }
  public void setResourcePlanName(@org.apache.thrift.annotation.Nullable java.lang.String resourcePlanName) {
    this.resourcePlanName = resourcePlanName;
  }
  public void unsetResourcePlanName() {
    this.resourcePlanName = null;
  }
  /** Returns true if field resourcePlanName is set (has been assigned a value) and false otherwise */
  public boolean isSetResourcePlanName() {
    return this.resourcePlanName != null;
  }
  public void setResourcePlanNameIsSet(boolean value) {
    if (!value) {
      this.resourcePlanName = null;
    }
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getPoolPath() {
    return this.poolPath;
  }
  public void setPoolPath(@org.apache.thrift.annotation.Nullable java.lang.String poolPath) {
    this.poolPath = poolPath;
  }
  public void unsetPoolPath() {
    this.poolPath = null;
  }
  /** Returns true if field poolPath is set (has been assigned a value) and false otherwise */
  public boolean isSetPoolPath() {
    return this.poolPath != null;
  }
  public void setPoolPathIsSet(boolean value) {
    if (!value) {
      this.poolPath = null;
    }
  }
  public double getAllocFraction() {
    return this.allocFraction;
  }
  public void setAllocFraction(double allocFraction) {
    this.allocFraction = allocFraction;
    setAllocFractionIsSet(true);
  }
  public void unsetAllocFraction() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ALLOCFRACTION_ISSET_ID);
  }
  /** Returns true if field allocFraction is set (has been assigned a value) and false otherwise */
  public boolean isSetAllocFraction() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ALLOCFRACTION_ISSET_ID);
  }
  public void setAllocFractionIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ALLOCFRACTION_ISSET_ID, value);
  }
  public int getQueryParallelism() {
    return this.queryParallelism;
  }
  public void setQueryParallelism(int queryParallelism) {
    this.queryParallelism = queryParallelism;
    setQueryParallelismIsSet(true);
  }
  public void unsetQueryParallelism() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __QUERYPARALLELISM_ISSET_ID);
  }
  /** Returns true if field queryParallelism is set (has been assigned a value) and false otherwise */
  public boolean isSetQueryParallelism() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __QUERYPARALLELISM_ISSET_ID);
  }
  public void setQueryParallelismIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __QUERYPARALLELISM_ISSET_ID, value);
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getSchedulingPolicy() {
    return this.schedulingPolicy;
  }
  public void setSchedulingPolicy(@org.apache.thrift.annotation.Nullable java.lang.String schedulingPolicy) {
    this.schedulingPolicy = schedulingPolicy;
  }
  public void unsetSchedulingPolicy() {
    this.schedulingPolicy = null;
  }
  /** Returns true if field schedulingPolicy is set (has been assigned a value) and false otherwise */
  public boolean isSetSchedulingPolicy() {
    return this.schedulingPolicy != null;
  }
  public void setSchedulingPolicyIsSet(boolean value) {
    if (!value) {
      this.schedulingPolicy = null;
    }
  }
  public boolean isIsSetSchedulingPolicy() {
    return this.isSetSchedulingPolicy;
  }
  public void setIsSetSchedulingPolicy(boolean isSetSchedulingPolicy) {
    this.isSetSchedulingPolicy = isSetSchedulingPolicy;
    setIsSetSchedulingPolicyIsSet(true);
  }
  public void unsetIsSetSchedulingPolicy() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ISSETSCHEDULINGPOLICY_ISSET_ID);
  }
  /** Returns true if field isSetSchedulingPolicy is set (has been assigned a value) and false otherwise */
  public boolean isSetIsSetSchedulingPolicy() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ISSETSCHEDULINGPOLICY_ISSET_ID);
  }
  public void setIsSetSchedulingPolicyIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ISSETSCHEDULINGPOLICY_ISSET_ID, value);
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getNs() {
    return this.ns;
  }
  public void setNs(@org.apache.thrift.annotation.Nullable java.lang.String ns) {
    this.ns = ns;
  }
  public void unsetNs() {
    this.ns = null;
  }
  /** Returns true if field ns is set (has been assigned a value) and false otherwise */
  public boolean isSetNs() {
    return this.ns != null;
  }
  public void setNsIsSet(boolean value) {
    if (!value) {
      this.ns = null;
    }
  }
  /** Sets the field identified by {@code field}; a null value unsets it. */
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case RESOURCE_PLAN_NAME:
      if (value == null) {
        unsetResourcePlanName();
      } else {
        setResourcePlanName((java.lang.String)value);
      }
      break;
    case POOL_PATH:
      if (value == null) {
        unsetPoolPath();
      } else {
        setPoolPath((java.lang.String)value);
      }
      break;
    case ALLOC_FRACTION:
      if (value == null) {
        unsetAllocFraction();
      } else {
        setAllocFraction((java.lang.Double)value);
      }
      break;
    case QUERY_PARALLELISM:
      if (value == null) {
        unsetQueryParallelism();
      } else {
        setQueryParallelism((java.lang.Integer)value);
      }
      break;
    case SCHEDULING_POLICY:
      if (value == null) {
        unsetSchedulingPolicy();
      } else {
        setSchedulingPolicy((java.lang.String)value);
      }
      break;
    case IS_SET_SCHEDULING_POLICY:
      if (value == null) {
        unsetIsSetSchedulingPolicy();
      } else {
        setIsSetSchedulingPolicy((java.lang.Boolean)value);
      }
      break;
    case NS:
      if (value == null) {
        unsetNs();
      } else {
        setNs((java.lang.String)value);
      }
      break;
    }
  }
  /** Returns the current value of {@code field} (boxed for primitives). */
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case RESOURCE_PLAN_NAME:
      return getResourcePlanName();
    case POOL_PATH:
      return getPoolPath();
    case ALLOC_FRACTION:
      return getAllocFraction();
    case QUERY_PARALLELISM:
      return getQueryParallelism();
    case SCHEDULING_POLICY:
      return getSchedulingPolicy();
    case IS_SET_SCHEDULING_POLICY:
      return isIsSetSchedulingPolicy();
    case NS:
      return getNs();
    }
    throw new java.lang.IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case RESOURCE_PLAN_NAME:
      return isSetResourcePlanName();
    case POOL_PATH:
      return isSetPoolPath();
    case ALLOC_FRACTION:
      return isSetAllocFraction();
    case QUERY_PARALLELISM:
      return isSetQueryParallelism();
    case SCHEDULING_POLICY:
      return isSetSchedulingPolicy();
    case IS_SET_SCHEDULING_POLICY:
      return isSetIsSetSchedulingPolicy();
    case NS:
      return isSetNs();
    }
    throw new java.lang.IllegalStateException();
  }
@Override
public boolean equals(java.lang.Object that) {
if (that instanceof WMNullablePool)
return this.equals((WMNullablePool)that);
return false;
}
  /**
   * Field-by-field equality: two structs are equal when each field is either
   * unset in both or set to equal values in both.
   */
  public boolean equals(WMNullablePool that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    boolean this_present_resourcePlanName = true && this.isSetResourcePlanName();
    boolean that_present_resourcePlanName = true && that.isSetResourcePlanName();
    if (this_present_resourcePlanName || that_present_resourcePlanName) {
      if (!(this_present_resourcePlanName && that_present_resourcePlanName))
        return false;
      if (!this.resourcePlanName.equals(that.resourcePlanName))
        return false;
    }
    boolean this_present_poolPath = true && this.isSetPoolPath();
    boolean that_present_poolPath = true && that.isSetPoolPath();
    if (this_present_poolPath || that_present_poolPath) {
      if (!(this_present_poolPath && that_present_poolPath))
        return false;
      if (!this.poolPath.equals(that.poolPath))
        return false;
    }
    boolean this_present_allocFraction = true && this.isSetAllocFraction();
    boolean that_present_allocFraction = true && that.isSetAllocFraction();
    if (this_present_allocFraction || that_present_allocFraction) {
      if (!(this_present_allocFraction && that_present_allocFraction))
        return false;
      if (this.allocFraction != that.allocFraction)
        return false;
    }
    boolean this_present_queryParallelism = true && this.isSetQueryParallelism();
    boolean that_present_queryParallelism = true && that.isSetQueryParallelism();
    if (this_present_queryParallelism || that_present_queryParallelism) {
      if (!(this_present_queryParallelism && that_present_queryParallelism))
        return false;
      if (this.queryParallelism != that.queryParallelism)
        return false;
    }
    boolean this_present_schedulingPolicy = true && this.isSetSchedulingPolicy();
    boolean that_present_schedulingPolicy = true && that.isSetSchedulingPolicy();
    if (this_present_schedulingPolicy || that_present_schedulingPolicy) {
      if (!(this_present_schedulingPolicy && that_present_schedulingPolicy))
        return false;
      if (!this.schedulingPolicy.equals(that.schedulingPolicy))
        return false;
    }
    boolean this_present_isSetSchedulingPolicy = true && this.isSetIsSetSchedulingPolicy();
    boolean that_present_isSetSchedulingPolicy = true && that.isSetIsSetSchedulingPolicy();
    if (this_present_isSetSchedulingPolicy || that_present_isSetSchedulingPolicy) {
      if (!(this_present_isSetSchedulingPolicy && that_present_isSetSchedulingPolicy))
        return false;
      if (this.isSetSchedulingPolicy != that.isSetSchedulingPolicy)
        return false;
    }
    boolean this_present_ns = true && this.isSetNs();
    boolean that_present_ns = true && that.isSetNs();
    if (this_present_ns || that_present_ns) {
      if (!(this_present_ns && that_present_ns))
        return false;
      if (!this.ns.equals(that.ns))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    // Mixes an isset marker (131071/524287) and, when set, the field value for
    // every field, using the 8191 multiplier scheme Thrift generates; kept
    // consistent with equals() above.
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetResourcePlanName()) ? 131071 : 524287);
    if (isSetResourcePlanName())
      hashCode = hashCode * 8191 + resourcePlanName.hashCode();
    hashCode = hashCode * 8191 + ((isSetPoolPath()) ? 131071 : 524287);
    if (isSetPoolPath())
      hashCode = hashCode * 8191 + poolPath.hashCode();
    hashCode = hashCode * 8191 + ((isSetAllocFraction()) ? 131071 : 524287);
    if (isSetAllocFraction())
      hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(allocFraction);
    hashCode = hashCode * 8191 + ((isSetQueryParallelism()) ? 131071 : 524287);
    if (isSetQueryParallelism())
      hashCode = hashCode * 8191 + queryParallelism;
    hashCode = hashCode * 8191 + ((isSetSchedulingPolicy()) ? 131071 : 524287);
    if (isSetSchedulingPolicy())
      hashCode = hashCode * 8191 + schedulingPolicy.hashCode();
    hashCode = hashCode * 8191 + ((isSetIsSetSchedulingPolicy()) ? 131071 : 524287);
    if (isSetIsSetSchedulingPolicy())
      hashCode = hashCode * 8191 + ((isSetSchedulingPolicy) ? 131071 : 524287);
    hashCode = hashCode * 8191 + ((isSetNs()) ? 131071 : 524287);
    if (isSetNs())
      hashCode = hashCode * 8191 + ns.hashCode();
    return hashCode;
  }
  /**
   * Field-by-field ordering in Thrift field-id order. For each field the
   * is-set flags are compared first (unset sorts before set); only when both
   * sides have the field set is the value itself compared.
   */
  @Override
  public int compareTo(WMNullablePool other) {
    if (!getClass().equals(other.getClass())) {
      // Different runtime classes order by class name (generated convention).
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.compare(isSetResourcePlanName(), other.isSetResourcePlanName());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetResourcePlanName()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.resourcePlanName, other.resourcePlanName);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetPoolPath(), other.isSetPoolPath());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetPoolPath()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.poolPath, other.poolPath);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetAllocFraction(), other.isSetAllocFraction());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetAllocFraction()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.allocFraction, other.allocFraction);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetQueryParallelism(), other.isSetQueryParallelism());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetQueryParallelism()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.queryParallelism, other.queryParallelism);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetSchedulingPolicy(), other.isSetSchedulingPolicy());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSchedulingPolicy()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.schedulingPolicy, other.schedulingPolicy);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetIsSetSchedulingPolicy(), other.isSetIsSetSchedulingPolicy());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetIsSetSchedulingPolicy()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.isSetSchedulingPolicy, other.isSetSchedulingPolicy);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = java.lang.Boolean.compare(isSetNs(), other.isSetNs());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetNs()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.ns, other.ns);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Maps a Thrift wire field id to its {@code _Fields} enum constant, or null if unknown. */
  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes this struct from {@code iprot}, dispatching to the standard or tuple scheme. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  /** Serializes this struct to {@code oprot}, dispatching to the standard or tuple scheme. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  /**
   * Human-readable rendering. Required fields (resourcePlanName, poolPath) are
   * always printed (as "null" when absent); optional fields are printed only
   * when their is-set flag is true.
   */
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("WMNullablePool(");
    boolean first = true;
    sb.append("resourcePlanName:");
    if (this.resourcePlanName == null) {
      sb.append("null");
    } else {
      sb.append(this.resourcePlanName);
    }
    first = false;
    if (!first) sb.append(", ");
    sb.append("poolPath:");
    if (this.poolPath == null) {
      sb.append("null");
    } else {
      sb.append(this.poolPath);
    }
    first = false;
    if (isSetAllocFraction()) {
      if (!first) sb.append(", ");
      sb.append("allocFraction:");
      sb.append(this.allocFraction);
      first = false;
    }
    if (isSetQueryParallelism()) {
      if (!first) sb.append(", ");
      sb.append("queryParallelism:");
      sb.append(this.queryParallelism);
      first = false;
    }
    if (isSetSchedulingPolicy()) {
      if (!first) sb.append(", ");
      sb.append("schedulingPolicy:");
      if (this.schedulingPolicy == null) {
        sb.append("null");
      } else {
        sb.append(this.schedulingPolicy);
      }
      first = false;
    }
    if (isSetIsSetSchedulingPolicy()) {
      if (!first) sb.append(", ");
      sb.append("isSetSchedulingPolicy:");
      sb.append(this.isSetSchedulingPolicy);
      first = false;
    }
    if (isSetNs()) {
      if (!first) sb.append(", ");
      sb.append("ns:");
      if (this.ns == null) {
        sb.append("null");
      } else {
        sb.append(this.ns);
      }
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }
  /**
   * Verifies that both required fields are set.
   *
   * @throws org.apache.thrift.TException (as TProtocolException) if
   *     resourcePlanName or poolPath is unset
   */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    if (!isSetResourcePlanName()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'resourcePlanName' is unset! Struct:" + toString());
    }
    if (!isSetPoolPath()) {
      throw new org.apache.thrift.protocol.TProtocolException("Required field 'poolPath' is unset! Struct:" + toString());
    }
    // check for sub-struct validity
  }
  /** Java serialization hook: delegates to Thrift compact-protocol encoding of this struct. */
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      // Re-wrap so the signature stays plain-IOException for ObjectOutputStream.
      throw new java.io.IOException(te);
    }
  }
  /** Java deserialization hook: decodes this struct via Thrift compact protocol. */
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      // Re-wrap to satisfy the readObject contract.
      throw new java.io.IOException(te);
    }
  }
  /** Factory producing the field-tagged (standard) serialization scheme for WMNullablePool. */
  private static class WMNullablePoolStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public WMNullablePoolStandardScheme getScheme() {
      return new WMNullablePoolStandardScheme();
    }
  }
  /**
   * Standard (field-id tagged) wire scheme: read() consumes fields in any order
   * until a STOP marker, skipping unknown ids/types; write() emits only fields
   * that are non-null / marked set. Do not reorder the write statements — the
   * emitted field order is part of the generated wire format.
   */
  private static class WMNullablePoolStandardScheme extends org.apache.thrift.scheme.StandardScheme<WMNullablePool> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, WMNullablePool struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // RESOURCE_PLAN_NAME
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.resourcePlanName = iprot.readString();
              struct.setResourcePlanNameIsSet(true);
            } else {
              // Type mismatch: skip the value rather than fail.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // POOL_PATH
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.poolPath = iprot.readString();
              struct.setPoolPathIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 3: // ALLOC_FRACTION
            if (schemeField.type == org.apache.thrift.protocol.TType.DOUBLE) {
              struct.allocFraction = iprot.readDouble();
              struct.setAllocFractionIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 4: // QUERY_PARALLELISM
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.queryParallelism = iprot.readI32();
              struct.setQueryParallelismIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 5: // SCHEDULING_POLICY
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.schedulingPolicy = iprot.readString();
              struct.setSchedulingPolicyIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 6: // IS_SET_SCHEDULING_POLICY
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.isSetSchedulingPolicy = iprot.readBool();
              struct.setIsSetSchedulingPolicyIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 7: // NS
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.ns = iprot.readString();
              struct.setNsIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            // Unknown field id from a newer/older peer: ignore.
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // Enforce required-field presence after the full struct is consumed.
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, WMNullablePool struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.resourcePlanName != null) {
        oprot.writeFieldBegin(RESOURCE_PLAN_NAME_FIELD_DESC);
        oprot.writeString(struct.resourcePlanName);
        oprot.writeFieldEnd();
      }
      if (struct.poolPath != null) {
        oprot.writeFieldBegin(POOL_PATH_FIELD_DESC);
        oprot.writeString(struct.poolPath);
        oprot.writeFieldEnd();
      }
      if (struct.isSetAllocFraction()) {
        oprot.writeFieldBegin(ALLOC_FRACTION_FIELD_DESC);
        oprot.writeDouble(struct.allocFraction);
        oprot.writeFieldEnd();
      }
      if (struct.isSetQueryParallelism()) {
        oprot.writeFieldBegin(QUERY_PARALLELISM_FIELD_DESC);
        oprot.writeI32(struct.queryParallelism);
        oprot.writeFieldEnd();
      }
      if (struct.schedulingPolicy != null) {
        // Optional object field: emitted only when both non-null and marked set.
        if (struct.isSetSchedulingPolicy()) {
          oprot.writeFieldBegin(SCHEDULING_POLICY_FIELD_DESC);
          oprot.writeString(struct.schedulingPolicy);
          oprot.writeFieldEnd();
        }
      }
      if (struct.isSetIsSetSchedulingPolicy()) {
        oprot.writeFieldBegin(IS_SET_SCHEDULING_POLICY_FIELD_DESC);
        oprot.writeBool(struct.isSetSchedulingPolicy);
        oprot.writeFieldEnd();
      }
      if (struct.ns != null) {
        if (struct.isSetNs()) {
          oprot.writeFieldBegin(NS_FIELD_DESC);
          oprot.writeString(struct.ns);
          oprot.writeFieldEnd();
        }
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  /** Factory producing the compact tuple serialization scheme for WMNullablePool. */
  private static class WMNullablePoolTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public WMNullablePoolTupleScheme getScheme() {
      return new WMNullablePoolTupleScheme();
    }
  }
  /**
   * Tuple wire scheme: required fields are written positionally, then a 5-bit
   * set describing which of the optional fields follow, then those optional
   * values in fixed order. read() mirrors write() exactly — the two must stay
   * in lock-step or the stream desynchronizes.
   */
  private static class WMNullablePoolTupleScheme extends org.apache.thrift.scheme.TupleScheme<WMNullablePool> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, WMNullablePool struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      // Required fields, no tags.
      oprot.writeString(struct.resourcePlanName);
      oprot.writeString(struct.poolPath);
      // Presence bitmap for the five optional fields.
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetAllocFraction()) {
        optionals.set(0);
      }
      if (struct.isSetQueryParallelism()) {
        optionals.set(1);
      }
      if (struct.isSetSchedulingPolicy()) {
        optionals.set(2);
      }
      if (struct.isSetIsSetSchedulingPolicy()) {
        optionals.set(3);
      }
      if (struct.isSetNs()) {
        optionals.set(4);
      }
      oprot.writeBitSet(optionals, 5);
      if (struct.isSetAllocFraction()) {
        oprot.writeDouble(struct.allocFraction);
      }
      if (struct.isSetQueryParallelism()) {
        oprot.writeI32(struct.queryParallelism);
      }
      if (struct.isSetSchedulingPolicy()) {
        oprot.writeString(struct.schedulingPolicy);
      }
      if (struct.isSetIsSetSchedulingPolicy()) {
        oprot.writeBool(struct.isSetSchedulingPolicy);
      }
      if (struct.isSetNs()) {
        oprot.writeString(struct.ns);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, WMNullablePool struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      struct.resourcePlanName = iprot.readString();
      struct.setResourcePlanNameIsSet(true);
      struct.poolPath = iprot.readString();
      struct.setPoolPathIsSet(true);
      // Bit i set => optional field i follows in the stream.
      java.util.BitSet incoming = iprot.readBitSet(5);
      if (incoming.get(0)) {
        struct.allocFraction = iprot.readDouble();
        struct.setAllocFractionIsSet(true);
      }
      if (incoming.get(1)) {
        struct.queryParallelism = iprot.readI32();
        struct.setQueryParallelismIsSet(true);
      }
      if (incoming.get(2)) {
        struct.schedulingPolicy = iprot.readString();
        struct.setSchedulingPolicyIsSet(true);
      }
      if (incoming.get(3)) {
        struct.isSetSchedulingPolicy = iprot.readBool();
        struct.setIsSetSchedulingPolicyIsSet(true);
      }
      if (incoming.get(4)) {
        struct.ns = iprot.readString();
        struct.setNsIsSet(true);
      }
    }
  }
private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
}
}
|
googleapis/google-cloud-java | 35,741 | java-beyondcorp-clientconnectorservices/google-cloud-beyondcorp-clientconnectorservices/src/test/java/com/google/cloud/beyondcorp/clientconnectorservices/v1/ClientConnectorServicesServiceClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.beyondcorp.clientconnectorservices.v1;
import static com.google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesServiceClient.ListClientConnectorServicesPagedResponse;
import static com.google.cloud.beyondcorp.clientconnectorservices.v1.ClientConnectorServicesServiceClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.iam.v1.AuditConfig;
import com.google.iam.v1.Binding;
import com.google.iam.v1.GetIamPolicyRequest;
import com.google.iam.v1.GetPolicyOptions;
import com.google.iam.v1.Policy;
import com.google.iam.v1.SetIamPolicyRequest;
import com.google.iam.v1.TestIamPermissionsRequest;
import com.google.iam.v1.TestIamPermissionsResponse;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class ClientConnectorServicesServiceClientTest {
  // Shared in-process gRPC mocks; created once per class in startStaticServer().
  private static MockClientConnectorServicesService mockClientConnectorServicesService;
  private static MockIAMPolicy mockIAMPolicy;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test state; recreated in setUp() and closed in tearDown().
  private LocalChannelProvider channelProvider;
  private ClientConnectorServicesServiceClient client;
  /** Starts one in-process mock server (random UUID name) hosting all three mock services. */
  @BeforeClass
  public static void startStaticServer() {
    mockClientConnectorServicesService = new MockClientConnectorServicesService();
    mockLocations = new MockLocations();
    mockIAMPolicy = new MockIAMPolicy();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(
                mockClientConnectorServicesService, mockLocations, mockIAMPolicy));
    mockServiceHelper.start();
  }
  /** Shuts the shared mock server down after the last test in the class. */
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
  /** Resets mock state and builds a fresh client wired to the local channel, no credentials. */
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    ClientConnectorServicesServiceSettings settings =
        ClientConnectorServicesServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = ClientConnectorServicesServiceClient.create(settings);
  }
  /** Closes the per-test client so its channel is released. */
  @After
  public void tearDown() throws Exception {
    client.close();
  }
  /** list via LocationName parent: one-element page is returned and the request carries the parent. */
  @Test
  public void listClientConnectorServicesTest() throws Exception {
    // Stub a single-page response (empty next-page token) with one default element.
    ClientConnectorService responsesElement = ClientConnectorService.newBuilder().build();
    ListClientConnectorServicesResponse expectedResponse =
        ListClientConnectorServicesResponse.newBuilder()
            .setNextPageToken("")
            .addAllClientConnectorServices(Arrays.asList(responsesElement))
            .build();
    mockClientConnectorServicesService.addResponse(expectedResponse);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ListClientConnectorServicesPagedResponse pagedListResponse =
        client.listClientConnectorServices(parent);
    List<ClientConnectorService> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getClientConnectorServicesList().get(0), resources.get(0));
    // Exactly one RPC, with the formatted parent resource name.
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListClientConnectorServicesRequest actualRequest =
        ((ListClientConnectorServicesRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** list: gRPC INVALID_ARGUMENT surfaces as InvalidArgumentException. */
  @Test
  public void listClientConnectorServicesExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      client.listClientConnectorServices(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** list via raw String parent: same happy-path check as the LocationName overload. */
  @Test
  public void listClientConnectorServicesTest2() throws Exception {
    ClientConnectorService responsesElement = ClientConnectorService.newBuilder().build();
    ListClientConnectorServicesResponse expectedResponse =
        ListClientConnectorServicesResponse.newBuilder()
            .setNextPageToken("")
            .addAllClientConnectorServices(Arrays.asList(responsesElement))
            .build();
    mockClientConnectorServicesService.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListClientConnectorServicesPagedResponse pagedListResponse =
        client.listClientConnectorServices(parent);
    List<ClientConnectorService> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getClientConnectorServicesList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListClientConnectorServicesRequest actualRequest =
        ((ListClientConnectorServicesRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** list (String overload): gRPC INVALID_ARGUMENT surfaces as InvalidArgumentException. */
  @Test
  public void listClientConnectorServicesExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      String parent = "parent-995424086";
      client.listClientConnectorServices(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** get via ClientConnectorServiceName: response is echoed back and the request carries the name. */
  @Test
  public void getClientConnectorServiceTest() throws Exception {
    ClientConnectorService expectedResponse =
        ClientConnectorService.newBuilder()
            .setName(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDisplayName("displayName1714148973")
            .setIngress(ClientConnectorService.Ingress.newBuilder().build())
            .setEgress(ClientConnectorService.Egress.newBuilder().build())
            .build();
    mockClientConnectorServicesService.addResponse(expectedResponse);
    ClientConnectorServiceName name =
        ClientConnectorServiceName.of("[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]");
    ClientConnectorService actualResponse = client.getClientConnectorService(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetClientConnectorServiceRequest actualRequest =
        ((GetClientConnectorServiceRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** get: gRPC INVALID_ARGUMENT surfaces as InvalidArgumentException. */
  @Test
  public void getClientConnectorServiceExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      ClientConnectorServiceName name =
          ClientConnectorServiceName.of("[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]");
      client.getClientConnectorService(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /** get via raw String name: same happy-path check as the typed-name overload. */
  @Test
  public void getClientConnectorServiceTest2() throws Exception {
    ClientConnectorService expectedResponse =
        ClientConnectorService.newBuilder()
            .setName(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDisplayName("displayName1714148973")
            .setIngress(ClientConnectorService.Ingress.newBuilder().build())
            .setEgress(ClientConnectorService.Egress.newBuilder().build())
            .build();
    mockClientConnectorServicesService.addResponse(expectedResponse);
    String name = "name3373707";
    ClientConnectorService actualResponse = client.getClientConnectorService(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetClientConnectorServiceRequest actualRequest =
        ((GetClientConnectorServiceRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** get (String overload): gRPC INVALID_ARGUMENT surfaces as InvalidArgumentException. */
  @Test
  public void getClientConnectorServiceExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      String name = "name3373707";
      client.getClientConnectorService(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  /**
   * createAsync via LocationName: a pre-completed LRO wrapping the expected
   * resource is stubbed, so .get() resolves immediately; request fields are verified.
   */
  @Test
  public void createClientConnectorServiceTest() throws Exception {
    ClientConnectorService expectedResponse =
        ClientConnectorService.newBuilder()
            .setName(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDisplayName("displayName1714148973")
            .setIngress(ClientConnectorService.Ingress.newBuilder().build())
            .setEgress(ClientConnectorService.Egress.newBuilder().build())
            .build();
    // Long-running operation already done, response packed as Any.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createClientConnectorServiceTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockClientConnectorServicesService.addResponse(resultOperation);
    LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
    ClientConnectorService clientConnectorService = ClientConnectorService.newBuilder().build();
    String clientConnectorServiceId = "clientConnectorServiceId766123022";
    ClientConnectorService actualResponse =
        client
            .createClientConnectorServiceAsync(
                parent, clientConnectorService, clientConnectorServiceId)
            .get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateClientConnectorServiceRequest actualRequest =
        ((CreateClientConnectorServiceRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(clientConnectorService, actualRequest.getClientConnectorService());
    Assert.assertEquals(clientConnectorServiceId, actualRequest.getClientConnectorServiceId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** createAsync: the failure is delivered through the future as ExecutionException wrapping InvalidArgumentException. */
  @Test
  public void createClientConnectorServiceExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      LocationName parent = LocationName.of("[PROJECT]", "[LOCATION]");
      ClientConnectorService clientConnectorService = ClientConnectorService.newBuilder().build();
      String clientConnectorServiceId = "clientConnectorServiceId766123022";
      client
          .createClientConnectorServiceAsync(
              parent, clientConnectorService, clientConnectorServiceId)
          .get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  /** createAsync via raw String parent: same pre-completed-LRO happy path as the typed overload. */
  @Test
  public void createClientConnectorServiceTest2() throws Exception {
    ClientConnectorService expectedResponse =
        ClientConnectorService.newBuilder()
            .setName(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDisplayName("displayName1714148973")
            .setIngress(ClientConnectorService.Ingress.newBuilder().build())
            .setEgress(ClientConnectorService.Egress.newBuilder().build())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("createClientConnectorServiceTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockClientConnectorServicesService.addResponse(resultOperation);
    String parent = "parent-995424086";
    ClientConnectorService clientConnectorService = ClientConnectorService.newBuilder().build();
    String clientConnectorServiceId = "clientConnectorServiceId766123022";
    ClientConnectorService actualResponse =
        client
            .createClientConnectorServiceAsync(
                parent, clientConnectorService, clientConnectorServiceId)
            .get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateClientConnectorServiceRequest actualRequest =
        ((CreateClientConnectorServiceRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(clientConnectorService, actualRequest.getClientConnectorService());
    Assert.assertEquals(clientConnectorServiceId, actualRequest.getClientConnectorServiceId());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** createAsync (String overload): failure path, ExecutionException wrapping InvalidArgumentException. */
  @Test
  public void createClientConnectorServiceExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      String parent = "parent-995424086";
      ClientConnectorService clientConnectorService = ClientConnectorService.newBuilder().build();
      String clientConnectorServiceId = "clientConnectorServiceId766123022";
      client
          .createClientConnectorServiceAsync(
              parent, clientConnectorService, clientConnectorServiceId)
          .get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  /** updateAsync: pre-completed LRO; verifies the resource and FieldMask reach the request. */
  @Test
  public void updateClientConnectorServiceTest() throws Exception {
    ClientConnectorService expectedResponse =
        ClientConnectorService.newBuilder()
            .setName(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .setCreateTime(Timestamp.newBuilder().build())
            .setUpdateTime(Timestamp.newBuilder().build())
            .setDisplayName("displayName1714148973")
            .setIngress(ClientConnectorService.Ingress.newBuilder().build())
            .setEgress(ClientConnectorService.Egress.newBuilder().build())
            .build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("updateClientConnectorServiceTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockClientConnectorServicesService.addResponse(resultOperation);
    ClientConnectorService clientConnectorService = ClientConnectorService.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    ClientConnectorService actualResponse =
        client.updateClientConnectorServiceAsync(clientConnectorService, updateMask).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateClientConnectorServiceRequest actualRequest =
        ((UpdateClientConnectorServiceRequest) actualRequests.get(0));
    Assert.assertEquals(clientConnectorService, actualRequest.getClientConnectorService());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  /** updateAsync: failure path, ExecutionException wrapping InvalidArgumentException. */
  @Test
  public void updateClientConnectorServiceExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      ClientConnectorService clientConnectorService = ClientConnectorService.newBuilder().build();
      FieldMask updateMask = FieldMask.newBuilder().build();
      client.updateClientConnectorServiceAsync(clientConnectorService, updateMask).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Happy-path delete using the typed resource name: the mock returns a completed LRO,
  // and the request captured by the mock must carry the stringified resource name.
  @Test
  public void deleteClientConnectorServiceTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteClientConnectorServiceTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockClientConnectorServicesService.addResponse(resultOperation);
    ClientConnectorServiceName name =
        ClientConnectorServiceName.of("[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]");
    client.deleteClientConnectorServiceAsync(name).get();
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteClientConnectorServiceRequest actualRequest =
        ((DeleteClientConnectorServiceRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Delete with typed resource name: a gRPC INVALID_ARGUMENT must be mapped to
  // InvalidArgumentException as the cause of the future's ExecutionException.
  @Test
  public void deleteClientConnectorServiceExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      ClientConnectorServiceName name =
          ClientConnectorServiceName.of("[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]");
      client.deleteClientConnectorServiceAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // Same delete flow as above but using the raw String overload of the name parameter.
  @Test
  public void deleteClientConnectorServiceTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("deleteClientConnectorServiceTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockClientConnectorServicesService.addResponse(resultOperation);
    String name = "name3373707";
    client.deleteClientConnectorServiceAsync(name).get();
    List<AbstractMessage> actualRequests = mockClientConnectorServicesService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteClientConnectorServiceRequest actualRequest =
        ((DeleteClientConnectorServiceRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // String-name overload of the delete error path: INVALID_ARGUMENT maps to
  // InvalidArgumentException wrapped in ExecutionException.
  @Test
  public void deleteClientConnectorServiceExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockClientConnectorServicesService.addException(exception);
    try {
      String name = "name3373707";
      client.deleteClientConnectorServiceAsync(name).get();
      Assert.fail("No exception raised");
    } catch (ExecutionException e) {
      Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
      InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
      Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
    }
  }
  // ListLocations paging: a single-element response with an empty next-page token must yield
  // exactly one resource through iterateAll(), and the outgoing request fields must round-trip.
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // Synchronous listLocations error path: the gRPC status is translated directly to
  // InvalidArgumentException (no ExecutionException wrapper since this call is not async).
  @Test
  public void listLocationsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      ListLocationsRequest request =
          ListLocationsRequest.newBuilder()
              .setName("name3373707")
              .setFilter("filter-1274492040")
              .setPageSize(883849137)
              .setPageToken("pageToken873572522")
              .build();
      client.listLocations(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getLocation happy path: the stubbed Location is returned verbatim and the request's
  // name field is forwarded unchanged to the Locations service.
  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // getLocation error path: INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void getLocationExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockLocations.addException(exception);
    try {
      GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
      client.getLocation(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // setIamPolicy happy path: resource, policy and update mask must all be forwarded
  // to the IAMPolicy service, and the stubbed Policy is returned to the caller.
  @Test
  public void setIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    SetIamPolicyRequest request =
        SetIamPolicyRequest.newBuilder()
            .setResource(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .setPolicy(Policy.newBuilder().build())
            .setUpdateMask(FieldMask.newBuilder().build())
            .build();
    Policy actualResponse = client.setIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    SetIamPolicyRequest actualRequest = ((SetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPolicy(), actualRequest.getPolicy());
    Assert.assertEquals(request.getUpdateMask(), actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // setIamPolicy error path: INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void setIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      SetIamPolicyRequest request =
          SetIamPolicyRequest.newBuilder()
              .setResource(
                  ClientConnectorServiceName.of(
                          "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                      .toString())
              .setPolicy(Policy.newBuilder().build())
              .setUpdateMask(FieldMask.newBuilder().build())
              .build();
      client.setIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // getIamPolicy happy path: resource and options must be forwarded unchanged,
  // and the stubbed Policy is returned to the caller.
  @Test
  public void getIamPolicyTest() throws Exception {
    Policy expectedResponse =
        Policy.newBuilder()
            .setVersion(351608024)
            .addAllBindings(new ArrayList<Binding>())
            .addAllAuditConfigs(new ArrayList<AuditConfig>())
            .setEtag(ByteString.EMPTY)
            .build();
    mockIAMPolicy.addResponse(expectedResponse);
    GetIamPolicyRequest request =
        GetIamPolicyRequest.newBuilder()
            .setResource(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .setOptions(GetPolicyOptions.newBuilder().build())
            .build();
    Policy actualResponse = client.getIamPolicy(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetIamPolicyRequest actualRequest = ((GetIamPolicyRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getOptions(), actualRequest.getOptions());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // getIamPolicy error path: INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void getIamPolicyExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      GetIamPolicyRequest request =
          GetIamPolicyRequest.newBuilder()
              .setResource(
                  ClientConnectorServiceName.of(
                          "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                      .toString())
              .setOptions(GetPolicyOptions.newBuilder().build())
              .build();
      client.getIamPolicy(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
  // testIamPermissions happy path: resource and permission list must round-trip through
  // the mock, and the stubbed response is returned verbatim.
  @Test
  public void testIamPermissionsTest() throws Exception {
    TestIamPermissionsResponse expectedResponse =
        TestIamPermissionsResponse.newBuilder().addAllPermissions(new ArrayList<String>()).build();
    mockIAMPolicy.addResponse(expectedResponse);
    TestIamPermissionsRequest request =
        TestIamPermissionsRequest.newBuilder()
            .setResource(
                ClientConnectorServiceName.of(
                        "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                    .toString())
            .addAllPermissions(new ArrayList<String>())
            .build();
    TestIamPermissionsResponse actualResponse = client.testIamPermissions(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockIAMPolicy.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    TestIamPermissionsRequest actualRequest = ((TestIamPermissionsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getResource(), actualRequest.getResource());
    Assert.assertEquals(request.getPermissionsList(), actualRequest.getPermissionsList());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
  // testIamPermissions error path: INVALID_ARGUMENT surfaces as InvalidArgumentException.
  @Test
  public void testIamPermissionsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockIAMPolicy.addException(exception);
    try {
      TestIamPermissionsRequest request =
          TestIamPermissionsRequest.newBuilder()
              .setResource(
                  ClientConnectorServiceName.of(
                          "[PROJECT]", "[LOCATION]", "[CLIENT_CONNECTOR_SERVICE]")
                      .toString())
              .addAllPermissions(new ArrayList<String>())
              .build();
      client.testIamPermissions(request);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
|
apache/cxf | 35,732 | rt/transports/http-jetty/src/test/java/org/apache/cxf/transport/http_jetty/JettyHTTPDestinationTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.transport.http_jetty;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import javax.xml.namespace.QName;
import jakarta.servlet.ServletInputStream;
import jakarta.servlet.ServletOutputStream;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.xml.bind.JAXBElement;
import org.apache.cxf.Bus;
import org.apache.cxf.BusException;
import org.apache.cxf.BusFactory;
import org.apache.cxf.bus.extension.ExtensionManagerBus;
import org.apache.cxf.common.util.StringUtils;
import org.apache.cxf.configuration.security.AuthorizationPolicy;
import org.apache.cxf.continuations.SuspendedInvocationException;
import org.apache.cxf.endpoint.EndpointResolverRegistry;
import org.apache.cxf.helpers.CastUtils;
import org.apache.cxf.io.AbstractWrappedOutputStream;
import org.apache.cxf.message.ExchangeImpl;
import org.apache.cxf.message.Message;
import org.apache.cxf.message.MessageImpl;
import org.apache.cxf.policy.PolicyDataEngine;
import org.apache.cxf.security.transport.TLSSessionInfo;
import org.apache.cxf.service.model.EndpointInfo;
import org.apache.cxf.service.model.ServiceInfo;
import org.apache.cxf.transport.Conduit;
import org.apache.cxf.transport.ConduitInitiator;
import org.apache.cxf.transport.ConduitInitiatorManager;
import org.apache.cxf.transport.Destination;
import org.apache.cxf.transport.MessageObserver;
import org.apache.cxf.transport.http.AbstractHTTPDestination;
import org.apache.cxf.transport.http.ContinuationProviderFactory;
import org.apache.cxf.transport.http.DestinationRegistry;
import org.apache.cxf.transport.http.HTTPTransportFactory;
import org.apache.cxf.transport.http.auth.DefaultBasicAuthSupplier;
import org.apache.cxf.transports.http.configuration.HTTPServerPolicy;
import org.apache.cxf.ws.addressing.AddressingProperties;
import org.apache.cxf.ws.addressing.EndpointReferenceType;
import org.apache.cxf.ws.addressing.EndpointReferenceUtils;
import org.apache.cxf.ws.addressing.JAXWSAConstants;
import org.eclipse.jetty.ee10.servlet.ServletRequestHttpWrapper;
import org.eclipse.jetty.ee10.servlet.ServletResponseHttpWrapper;
import org.eclipse.jetty.http.HttpFields;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.ArgumentMatchers.isA;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class JettyHTTPDestinationTest {
    // --- Credentials and auth-header constants shared with subclasses. ---
    protected static final String AUTH_HEADER = "Authorization";
    protected static final String USER = "copernicus";
    protected static final String PASSWD = "epicycles";
    protected static final String BASIC_AUTH = DefaultBasicAuthSupplier.getBasicAuthHeader(USER, PASSWD);
    // Deliberately unroutable base address so no real network traffic can occur.
    private static final String NOWHERE = "http://nada.nothing.nowhere.null/";
    private static final String PAYLOAD = "message payload";
    // Challenge header/value fixtures for authentication-related scenarios.
    private static final String CHALLENGE_HEADER = "WWW-Authenticate";
    private static final String BASIC_CHALLENGE = "Basic realm=terra";
    private static final String DIGEST_CHALLENGE = "Digest realm=luna";
    private static final String CUSTOM_CHALLENGE = "Custom realm=sol";
    // --- Per-test mutable fixtures; reset to null in tearDown(). ---
    private Bus bus;
    private Bus threadDefaultBus; // captured inside the MessageObserver when a message arrives
    private Conduit decoupledBackChannel;
    private EndpointInfo endpointInfo;
    private EndpointReferenceType address;
    private JettyHTTPServerEngine engine; // Mockito mock injected into the destination
    private HTTPServerPolicy policy;
    private JettyHTTPDestination destination;
    private ServletRequestHttpWrapper request;
    private ServletResponseHttpWrapper response;
    private Message inMessage;
    private Message outMessage;
    private MessageObserver observer;
    private ServletInputStream is;
    private ServletOutputStream os;
    private HTTPTransportFactory transportFactory;
/**
* This class replaces the engine in the Jetty Destination.
*/
    private class EasyMockJettyHTTPDestination
        extends JettyHTTPDestination {
        // Installs the supplied mock engine in place of the one the superclass would create.
        EasyMockJettyHTTPDestination(Bus bus,
                                     DestinationRegistry registry,
                                     EndpointInfo endpointInfo,
                                     JettyHTTPServerEngineFactory serverEngineFactory,
                                     JettyHTTPServerEngine easyMockEngine) throws IOException {
            super(bus, registry, endpointInfo, serverEngineFactory);
            engine = easyMockEngine;
        }
        @Override
        public void retrieveEngine() {
            // Leave engine alone.
        }
    }
@After
public void tearDown() {
if (bus != null) {
bus.shutdown(true);
}
bus = null;
transportFactory = null;
decoupledBackChannel = null;
address = null;
engine = null;
request = null;
response = null;
inMessage = null;
outMessage = null;
is = null;
os = null;
destination = null;
BusFactory.setDefaultBus(null);
}
    // The destination's EPR must expose the configured address (with the default
    // port filled in) plus the service and port QNames from the endpoint info.
    @Test
    public void testGetAddress() throws Exception {
        destination = setUpDestination();
        EndpointReferenceType ref = destination.getAddress();
        assertNotNull("unexpected null address", ref);
        assertEquals("unexpected address",
                     EndpointReferenceUtils.getAddress(ref),
                     StringUtils.addDefaultPortIfMissing(EndpointReferenceUtils.getAddress(address)));
        assertEquals("unexpected service name local part",
                     EndpointReferenceUtils.getServiceName(ref, bus).getLocalPart(),
                     "Service");
        assertEquals("unexpected portName",
                     EndpointReferenceUtils.getPortName(ref),
                     "Port");
    }
@Test
public void testRandomPortAllocation() throws Exception {
bus = BusFactory.getDefaultBus(true);
transportFactory = new HTTPTransportFactory();
ServiceInfo serviceInfo = new ServiceInfo();
serviceInfo.setName(new QName("bla", "Service"));
EndpointInfo ei = new EndpointInfo(serviceInfo, "");
ei.setName(new QName("bla", "Port"));
Destination d1 = transportFactory.getDestination(ei, bus);
URL url = new URL(d1.getAddress().getAddress().getValue());
assertTrue("No random port has been allocated",
url.getPort() > 0);
}
    // A SuspendedInvocationException thrown by the observer must propagate out of
    // doService with the observer's original cause preserved, not be swallowed.
    @Test
    public void testSuspendedException() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false);
        final RuntimeException ex = new RuntimeException();
        observer = new MessageObserver() {
            public void onMessage(Message m) {
                throw new SuspendedInvocationException(ex);
            }
        };
        destination.setMessageObserver(observer);
        try {
            destination.doService(request, response);
            fail("Suspended invocation swallowed");
        } catch (RuntimeException runtimeEx) {
            assertSame("Original exception is not preserved", ex, runtimeEx);
        }
    }
    // When the engine disables continuations, retrieveFromContinuation must return
    // null so the destination falls back to synchronous dispatch.
    @Test
    public void testContinuationsIgnored() throws Exception {
        HttpServletRequest httpRequest = mock(HttpServletRequest.class);
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        EndpointInfo ei = new EndpointInfo(serviceInfo, "");
        ei.setName(new QName("bla", "Port"));
        final JettyHTTPServerEngine httpEngine = new JettyHTTPServerEngine();
        httpEngine.setContinuationsEnabled(false);
        // Factory always hands back the continuation-disabled engine regardless of port.
        JettyHTTPServerEngineFactory factory = new JettyHTTPServerEngineFactory() {
            @Override
            public JettyHTTPServerEngine retrieveJettyHTTPServerEngine(int port) {
                return httpEngine;
            }
        };
        Bus b2 = new ExtensionManagerBus();
        transportFactory = new HTTPTransportFactory();
        b2.setExtension(factory, JettyHTTPServerEngineFactory.class);
        TestJettyDestination testDestination =
            new TestJettyDestination(b2,
                                     transportFactory.getRegistry(),
                                     ei,
                                     factory);
        testDestination.finalizeConfig();
        Message mi = testDestination.retrieveFromContinuation(httpRequest);
        assertNull("Continuations must be ignored", mi);
    }
@Test
public void testGetMultiple() throws Exception {
bus = BusFactory.getDefaultBus(true);
transportFactory = new HTTPTransportFactory();
ServiceInfo serviceInfo = new ServiceInfo();
serviceInfo.setName(new QName("bla", "Service"));
EndpointInfo ei = new EndpointInfo(serviceInfo, "");
ei.setName(new QName("bla", "Port"));
ei.setAddress("http://foo");
Destination d1 = transportFactory.getDestination(ei, bus);
Destination d2 = transportFactory.getDestination(ei, bus);
// Second get should not generate a new destination. It should just retrieve the existing one
assertEquals(d1, d2);
d2.shutdown();
Destination d3 = transportFactory.getDestination(ei, bus);
// Now a new destination should have been created
assertNotSame(d1, d3);
}
    // Clearing the message observer must trigger removal of the servant from the engine
    // (verified via the removeServant expectation set up below).
    @Test
    public void testRemoveServant() throws Exception {
        destination = setUpDestination();
        setUpRemoveServant();
        destination.setMessageObserver(null);
    }
    // With a redirect URL configured on the server policy, doService must issue an
    // HTTP redirect (the mock response expects sendRedirect, stubbed in setUpDoService).
    @Test
    public void testDoServiceRedirectURL() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(true);
        destination.doService(request, response);
    }
    // Basic POST dispatch: doService must build an in-message (checked by verifyDoService)
    // and must restore the caller's thread-default bus both before and after dispatch.
    @Test
    public void testDoService() throws Exception {
        Bus defaultBus = new ExtensionManagerBus();
        assertSame("Default thread bus has not been set",
                   defaultBus, BusFactory.getThreadDefaultBus());
        destination = setUpDestination(false, false);
        setUpDoService(false);
        assertSame("Default thread bus has been unexpectedly reset",
                   defaultBus, BusFactory.getThreadDefaultBus());
        destination.doService(request, response);
        verifyDoService();
        assertSame("Default thread bus has not been reset",
                   defaultBus, BusFactory.getThreadDefaultBus());
    }
    // GET dispatch: the method, path info and query string from the servlet request
    // must be copied onto the CXF in-message.
    @Test
    public void testDoServiceWithHttpGET() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false,
                       false,
                       false,
                       "GET",
                       "?customerId=abc&cutomerAdd=def",
                       200);
        destination.doService(request, response);
        assertNotNull("unexpected null message", inMessage);
        assertEquals("unexpected method",
                     inMessage.get(Message.HTTP_REQUEST_METHOD),
                     "GET");
        assertEquals("unexpected path",
                     inMessage.get(Message.PATH_INFO),
                     "/bar/foo");
        assertEquals("unexpected query",
                     inMessage.get(Message.QUERY_STRING),
                     "?customerId=abc&cutomerAdd=def");
    }
    // After dispatch, the back channel obtained from the in-message must target the
    // WS-Addressing anonymous address (i.e. the synchronous HTTP response).
    @Test
    public void testGetAnonBackChannel() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel = destination.getBackChannel(inMessage);
        assertNotNull("expected back channel", backChannel);
        assertEquals("unexpected target",
                     EndpointReferenceUtils.ANONYMOUS_ADDRESS,
                     backChannel.getTarget().getAddress().getValue());
    }
    // Sending on the back channel with a 200 response: verifyBackChannelSend checks
    // the payload and status are written to the servlet response.
    @Test
    public void testGetBackChannelSend() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel =
            destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 200);
    }
    // Same as testGetBackChannelSend but for a fault: HTTP status 500 must be used.
    @Test
    public void testGetBackChannelSendFault() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, 500);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel =
            destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 500);
    }
    // One-way variant of the fault send: the final 'true' flag to verifyBackChannelSend
    // marks the exchange as one-way.
    @Test
    public void testGetBackChannelSendOneway() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, 500);
        destination.doService(request, response);
        setUpInMessage();
        Conduit backChannel =
            destination.getBackChannel(inMessage);
        outMessage = setUpOutMessage();
        backChannel.prepare(outMessage);
        verifyBackChannelSend(backChannel, outMessage, 500, true);
    }
    // Decoupled response flow: a partial response (202 Accepted) goes out on the first
    // back channel, then the full response is prepared on a second, decoupled channel.
    @Test
    public void testGetBackChannelSendDecoupled() throws Exception {
        destination = setUpDestination(false, false);
        setUpDoService(false, true, true, 202);
        destination.doService(request, response);
        setUpInMessage();
        Message partialResponse = setUpOutMessage();
        partialResponse.put(Message.PARTIAL_RESPONSE_MESSAGE, Boolean.TRUE);
        Conduit partialBackChannel =
            destination.getBackChannel(inMessage);
        partialBackChannel.prepare(partialResponse);
        verifyBackChannelSend(partialBackChannel, partialResponse, 202);
        outMessage = setUpOutMessage();
        Conduit fullBackChannel =
            destination.getBackChannel(inMessage);
        fullBackChannel.prepare(outMessage);
    }
    // An HTTPServerPolicy attached to the endpoint info as an extensor must be picked
    // up by the destination and exposed via getServer().
    @Test
    public void testServerPolicyInServiceModel()
        throws Exception {
        policy = new HTTPServerPolicy();
        address = getEPR("bar/foo");
        bus = BusFactory.getDefaultBus(true);
        transportFactory = new HTTPTransportFactory();
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        endpointInfo = new EndpointInfo(serviceInfo, "");
        endpointInfo.setName(new QName("bla", "Port"));
        endpointInfo.addExtensor(policy);
        engine = mock(JettyHTTPServerEngine.class);
        endpointInfo.setAddress(NOWHERE + "bar/foo");
        JettyHTTPDestination dest =
            new EasyMockJettyHTTPDestination(
                    bus, transportFactory.getRegistry(), endpointInfo, null, engine);
        assertEquals(policy, dest.getServer());
    }
    // Default multiplexing mode: the id is encoded as a JAXBElement reference
    // parameter inside the returned EPR rather than in the address itself.
    @Test
    public void testMultiplexGetAddressWithId() throws Exception {
        destination = setUpDestination();
        final String id = "ID2";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        assertNotNull(refWithId);
        assertNotNull(refWithId.getReferenceParameters());
        assertNotNull(refWithId.getReferenceParameters().getAny());
        assertTrue("it is an element",
                   refWithId.getReferenceParameters().getAny().get(0) instanceof JAXBElement);
        JAXBElement<?> el = (JAXBElement<?>) refWithId.getReferenceParameters().getAny().get(0);
        assertEquals("match our id", el.getValue(), id);
    }
    // With multiplexWithAddress enabled, the id is appended to the EPR address and no
    // reference parameters are created.
    @Test
    public void testMultiplexGetAddressWithIdForAddress() throws Exception {
        destination = setUpDestination();
        destination.setMultiplexWithAddress(true);
        final String id = "ID3";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        assertNotNull(refWithId);
        assertNull(refWithId.getReferenceParameters());
        assertTrue("match our id", EndpointReferenceUtils.getAddress(refWithId).indexOf(id) != -1);
    }
    // Round-trip of an address-embedded id: getId must recover it from the PATH_INFO
    // message-context entry, and must return null when no context is available.
    @Test
    public void testMultiplexGetIdForAddress() throws Exception {
        destination = setUpDestination();
        destination.setMultiplexWithAddress(true);
        final String id = "ID3";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        String pathInfo = EndpointReferenceUtils.getAddress(refWithId);
        Map<String, Object> context = new HashMap<>();
        assertNull("fails with no context", destination.getId(context));
        context.put(Message.PATH_INFO, pathInfo);
        String result = destination.getId(context);
        assertNotNull(result);
        assertEquals("match our id", result, id);
    }
    // Round-trip of a reference-parameter id: getId must recover it from the inbound
    // WS-Addressing properties' "to" EPR placed in the message context.
    @Test
    public void testMultiplexGetId() throws Exception {
        destination = setUpDestination();
        final String id = "ID3";
        EndpointReferenceType refWithId = destination.getAddressWithId(id);
        Map<String, Object> context = new HashMap<>();
        assertNull("fails with no context", destination.getId(context));
        AddressingProperties maps = mock(AddressingProperties.class);
        when(maps.getToEndpointReference()).thenReturn(refWithId);
        context.put(JAXWSAConstants.ADDRESSING_PROPERTIES_INBOUND, maps);
        String result = destination.getId(context);
        assertNotNull(result);
        assertEquals("match our id", result, id);
    }
private JettyHTTPDestination setUpDestination()
throws Exception {
return setUpDestination(false, false);
};
    /**
     * Builds a JettyHTTPDestination wired to a Mockito-mocked Jetty engine.
     *
     * @param contextMatchOnStem not consulted by this setup (kept for signature stability)
     * @param mockedBus when true, use a stubbed Mockito bus instead of a real
     *                  ExtensionManagerBus with a custom ConduitInitiatorManager
     * @return the configured destination, with {@code observer} installed so incoming
     *         messages are captured into {@code inMessage}/{@code threadDefaultBus}
     */
    private JettyHTTPDestination setUpDestination(
        boolean contextMatchOnStem, boolean mockedBus)
        throws Exception {
        policy = new HTTPServerPolicy();
        address = getEPR("bar/foo");
        transportFactory = new HTTPTransportFactory();
        // Conduit initiator that always hands back the test's decoupled back channel.
        final ConduitInitiator ci = new ConduitInitiator() {
            public Conduit getConduit(EndpointInfo targetInfo, Bus b) throws IOException {
                return decoupledBackChannel;
            }
            public Conduit getConduit(EndpointInfo localInfo, EndpointReferenceType target, Bus b)
                throws IOException {
                return decoupledBackChannel;
            }
            public List<String> getTransportIds() {
                return null;
            }
            public Set<String> getUriPrefixes() {
                return new HashSet<>(Collections.singletonList("http"));
            }
        };
        // Manager that resolves every URI to the conduit initiator above.
        ConduitInitiatorManager mgr = new ConduitInitiatorManager() {
            public void deregisterConduitInitiator(String name) {
            }
            public ConduitInitiator getConduitInitiator(String name) throws BusException {
                return null;
            }
            public ConduitInitiator getConduitInitiatorForUri(String uri) {
                return ci;
            }
            public void registerConduitInitiator(String name, ConduitInitiator factory) {
            }
        };
        if (!mockedBus) {
            bus = new ExtensionManagerBus();
            bus.setExtension(mgr, ConduitInitiatorManager.class);
        } else {
            // Stubbed bus: only the extensions the destination actually queries are wired up.
            bus = mock(Bus.class);
            when(bus.getExtension(EndpointResolverRegistry.class)).thenReturn(null);
            when(bus.getExtension(ContinuationProviderFactory.class)).thenReturn(null);
            when(bus.getExtension(PolicyDataEngine.class)).thenReturn(null);
            when(bus.hasExtensionByName("org.apache.cxf.ws.policy.PolicyEngine")).thenReturn(false);
            when(bus.getExtension(ClassLoader.class)).thenReturn(this.getClass().getClassLoader());
        }
        engine = mock(JettyHTTPServerEngine.class);
        ServiceInfo serviceInfo = new ServiceInfo();
        serviceInfo.setName(new QName("bla", "Service"));
        endpointInfo = new EndpointInfo(serviceInfo, "");
        endpointInfo.setName(new QName("bla", "Port"));
        endpointInfo.setAddress(NOWHERE + "bar/foo");
        endpointInfo.addExtensor(policy);
        doNothing().when(engine).addServant(eq(new URL(NOWHERE + "bar/foo")),
                                            isA(JettyHTTPHandler.class));
        when(engine.getContinuationsEnabled()).thenReturn(true);
        JettyHTTPDestination dest = new EasyMockJettyHTTPDestination(bus,
                                                     transportFactory.getRegistry(),
                                                     endpointInfo,
                                                     null,
                                                     engine);
        dest.retrieveEngine();
        policy = dest.getServer();
        // Observer records the dispatched message and the thread-default bus at dispatch time.
        observer = new MessageObserver() {
            public void onMessage(Message m) {
                inMessage = m;
                threadDefaultBus = BusFactory.getThreadDefaultBus();
            }
        };
        dest.setMessageObserver(observer);
        return dest;
    }
    // Stubs the engine so that removing the servant for our test URL is a no-op.
    private void setUpRemoveServant() throws Exception {
        doNothing().when(engine).removeServant(eq(new URL(NOWHERE + "bar/foo")));
    }
    // Convenience overload: no response is sent back by the mocks.
    private void setUpDoService(boolean setRedirectURL) throws Exception {
        setUpDoService(setRedirectURL, false);
    }
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse) throws Exception {
setUpDoService(setRedirectURL,
sendResponse,
false);
}
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse, int status) throws Exception {
String method = "POST";
String query = "?name";
setUpDoService(setRedirectURL, sendResponse, false, method, query, status);
}
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse, boolean decoupled, int status) throws Exception {
String method = "POST";
String query = "?name";
setUpDoService(setRedirectURL, sendResponse, decoupled, method, query, status);
}
private void setUpDoService(boolean setRedirectURL,
boolean sendResponse,
boolean decoupled) throws Exception {
String method = "POST";
String query = "?name";
setUpDoService(setRedirectURL, sendResponse, decoupled, method, query, 200);
}
/**
 * Primary mock setup for a doService() round trip.
 *
 * Creates Mockito mocks for the servlet streams, request and response, then
 * stubs them for one of three paths: a redirect (setRedirectURL), a
 * "GET ?wsdl" query, or a regular POST with the headers/attributes the
 * destination reads. Optionally stubs the response for sending a reply and
 * sets up a decoupled back channel.
 *
 * @param setRedirectURL true to configure the HTTPServerPolicy redirect path
 * @param sendResponse   true to stub response status/headers/output stream
 * @param decoupled      true to also mock the decoupled back-channel conduit
 * @param method         HTTP method to report from the request mock
 * @param query          query string to report from the request mock
 * @param status         HTTP status expected to be written on the response
 */
private void setUpDoService(boolean setRedirectURL,
                            boolean sendResponse,
                            boolean decoupled,
                            String method,
                            String query,
                            int status
                            ) throws Exception {
    is = mock(ServletInputStream.class);
    os = mock(ServletOutputStream.class);
    request = mock(ServletRequestHttpWrapper.class);
    response = mock(ServletResponseHttpWrapper.class);
    when(request.getMethod()).thenReturn(method);
    //request.getConnection();
    //whenLastCall().thenReturn(null).anyTimes();
    when(request.getUserPrincipal()).thenReturn(null);
    if (setRedirectURL) {
        // redirect path: destination should send a 3xx and flush, nothing else
        policy.setRedirectURL(NOWHERE + "foo/bar");
        doNothing().when(response).sendRedirect(eq(NOWHERE + "foo/bar"));
        doNothing().when(response).flushBuffer();
    } else {
        //getQueryString for if statement
        when(request.getQueryString()).thenReturn(query);
        if ("GET".equals(method) && "?wsdl".equals(query)) {
            verifyGetWSDLQuery();
        } else { // test for the post
            when(request.getAttribute(AbstractHTTPDestination.CXF_CONTINUATION_MESSAGE))
                .thenReturn(null);
            //when(request.getMethod()).thenReturn(method);
            when(request.getInputStream()).thenReturn(is);
            // request URI pieces the destination uses to build the endpoint address
            when(request.getContextPath()).thenReturn("/bar");
            when(request.getServletPath()).thenReturn("");
            when(request.getPathInfo()).thenReturn("/foo");
            when(request.getRequestURI()).thenReturn("/foo");
            when(request.getRequestURL())
                .thenReturn(new StringBuffer("http://localhost/foo"));
            doNothing().when(request)
                .setAttribute("org.springframework.web.servlet.HandlerMapping.bestMatchingPattern", "/foo");
            when(request.getCharacterEncoding()).thenReturn(StandardCharsets.UTF_8.name());
            when(request.getQueryString()).thenReturn(query);
            when(request.getHeader("Accept")).thenReturn("*/*");
            when(request.getContentType()).thenReturn("text/xml charset=utf8");
            when(request.getAttribute("org.eclipse.jetty.ajax.Continuation")).thenReturn(null);
            when(request.getAttribute("http.service.redirection")).thenReturn(null);
            // request headers delivered through real Jetty HttpFields so the
            // enumerations behave like the container's
            HttpFields.Mutable httpFields = HttpFields.build();
            httpFields.add("content-type", "text/xml");
            httpFields.add("content-type", "charset=utf8");
            httpFields.put(JettyHTTPDestinationTest.AUTH_HEADER, JettyHTTPDestinationTest.BASIC_AUTH);
            when(request.getHeaderNames()).thenReturn(httpFields.getFieldNames());
            when(request.getHeaders("content-type")).thenReturn(httpFields.getValues("content-type"));
            when(request.getHeaders(JettyHTTPDestinationTest.AUTH_HEADER)).thenReturn(
                httpFields.getValues(JettyHTTPDestinationTest.AUTH_HEADER));
            when(request.getInputStream()).thenReturn(is);
            doNothing().when(response).flushBuffer();
            if (sendResponse) {
                doNothing().when(response).setStatus(status);
                doNothing().when(response).setContentType("text/xml charset=utf8");
                doNothing().when(response).addHeader(isA(String.class), isA(String.class));
                doNothing().when(response).setContentLength(0);
                when(response.getOutputStream()).thenReturn(os);
                when(response.getStatus()).thenReturn(status);
                doNothing().when(response).flushBuffer();
            }
            // TLS attributes read when building TLSSessionInfo for the message
            when(request.getAttribute("jakarta.servlet.request.cipher_suite")).thenReturn("anythingwilldoreally");
            when(request.getAttribute("javax.net.ssl.session")).thenReturn(null);
            when(request.getAttribute("jakarta.servlet.request.X509Certificate")).thenReturn(null);
        }
    }
    if (decoupled) {
        setupDecoupledBackChannel();
    }
}
/** Mock a decoupled back-channel Conduit that silently accepts an observer and prepare(). */
private void setupDecoupledBackChannel() throws IOException {
    decoupledBackChannel = mock(Conduit.class);
    doNothing().when(decoupledBackChannel).setMessageObserver(isA(MessageObserver.class));
    doNothing().when(decoupledBackChannel).prepare(isA(Message.class));
}
/** Give the captured in-message a fresh exchange for the back-channel tests. */
private void setUpInMessage() {
    inMessage.setExchange(new ExchangeImpl());
}
/**
 * Build an outbound message mirroring the inbound one, with its own exchange
 * and an empty case-insensitive protocol-header map.
 *
 * @return the prepared outbound message
 */
private Message setUpOutMessage() {
    Message out = new MessageImpl();
    out.putAll(inMessage);
    out.setExchange(new ExchangeImpl());
    out.put(Message.PROTOCOL_HEADERS,
            new TreeMap<String, List<String>>(String.CASE_INSENSITIVE_ORDER));
    return out;
}
/** Install the three challenge values under CHALLENGE_HEADER on the out message. */
private void setUpResponseHeaders(Message outMsg) {
    Map<String, List<String>> headers =
        CastUtils.cast((Map<?, ?>) outMsg.get(Message.PROTOCOL_HEADERS));
    assertNotNull("expected response headers", headers);

    List<String> challenges = new ArrayList<>();
    challenges.add(BASIC_CHALLENGE);
    challenges.add(DIGEST_CHALLENGE);
    challenges.add(CUSTOM_CHALLENGE);
    headers.put(CHALLENGE_HEADER, challenges);
}
/**
 * Stub request/response for a "GET ?wsdl" query against the destination.
 * NOTE(review): doNothing() on the non-void getOutputStream() would throw a
 * MockitoException if this stubbing were actually evaluated -- confirm whether
 * this path is ever exercised.
 */
private void verifyGetWSDLQuery() throws Exception {
    when(request.getRequestURL()).thenReturn(new StringBuffer("http://localhost/bar/foo"));
    when(request.getPathInfo()).thenReturn("/bar/foo");
    when(request.getCharacterEncoding()).thenReturn(StandardCharsets.UTF_8.name());
    when(request.getQueryString()).thenReturn("wsdl");
    doNothing().when(response).setContentType("text/xml");
    doNothing().when(response).getOutputStream();
}
/**
 * Assert the state captured by the message observer after doService():
 * thread-default bus, request/response objects, method, path, query and
 * TLS session info, then delegate to header verification.
 *
 * Fixes: the TLSSessionInfo assertion reused the copy-pasted message
 * "unexpected query"; assertEquals arguments are now in the conventional
 * (message, expected, actual) order so failure reports read correctly.
 */
private void verifyDoService() throws Exception {
    assertSame("Default thread bus has not been set for request",
               bus, threadDefaultBus);
    assertNotNull("unexpected null message", inMessage);
    assertSame("unexpected HTTP request",
               request,
               inMessage.get(AbstractHTTPDestination.HTTP_REQUEST));
    assertSame("unexpected HTTP response",
               response,
               inMessage.get(AbstractHTTPDestination.HTTP_RESPONSE));
    assertEquals("unexpected method",
                 "POST",
                 inMessage.get(Message.HTTP_REQUEST_METHOD));
    assertEquals("unexpected path",
                 "/bar/foo",
                 inMessage.get(Message.PATH_INFO));
    assertEquals("unexpected query",
                 "?name",
                 inMessage.get(Message.QUERY_STRING));
    assertNotNull("expected TLSSessionInfo",
                  inMessage.get(TLSSessionInfo.class));
    verifyRequestHeaders();
}
/**
 * Assert that the inbound protocol headers were copied onto the message and
 * that the Basic auth header was decoded into an AuthorizationPolicy.
 *
 * Fixes: the null-check previously asserted the unrelated {@code policy}
 * field (the HTTPServerPolicy) instead of the {@code authpolicy} it had just
 * fetched, so a missing AuthorizationPolicy produced an NPE rather than a
 * clean assertion failure.
 */
private void verifyRequestHeaders() throws Exception {
    Map<String, List<String>> requestHeaders =
        CastUtils.cast((Map<?, ?>)inMessage.get(Message.PROTOCOL_HEADERS));
    assertNotNull("expected request headers",
                  requestHeaders);
    List<String> values = requestHeaders.get("content-type");
    assertNotNull("expected field", values);
    assertEquals("unexpected values", 2, values.size());
    assertTrue("expected value", values.contains("text/xml"));
    assertTrue("expected value", values.contains("charset=utf8"));
    values = requestHeaders.get(AUTH_HEADER);
    assertNotNull("expected field", values);
    assertEquals("unexpected values", 1, values.size());
    assertTrue("expected value", values.contains(BASIC_AUTH));

    AuthorizationPolicy authpolicy =
        inMessage.get(AuthorizationPolicy.class);
    assertNotNull("Expected some auth tokens", authpolicy);
    assertEquals("expected user",
                 USER,
                 authpolicy.getUserName());
    assertEquals("expected passwd",
                 PASSWD,
                 authpolicy.getPassword());
}
/**
 * Check response protocol headers on the out message. The challenge-header
 * assertions are disabled pending the REVISIT below.
 */
private void verifyResponseHeaders(Message outMsg) throws Exception {
    Map<String, List<String>> responseHeaders =
        CastUtils.cast((Map<?, ?>)outMsg.get(Message.PROTOCOL_HEADERS));
    assertNotNull("expected response headers",
                  responseHeaders);
    //REVISIT CHALLENGE_HEADER's mean
    /*assertEquals("expected addField",
                 3,
                 response.getAddFieldCallCount());
    Enumeration e = response.getFieldValues(CHALLENGE_HEADER);
    List<String> challenges = new ArrayList<>();
    while (e.hasMoreElements()) {
        challenges.add((String)e.nextElement());
    }
    assertTrue("expected challenge",
               challenges.contains(BASIC_CHALLENGE));
    assertTrue("expected challenge",
               challenges.contains(DIGEST_CHALLENGE));
    assertTrue("expected challenge",
               challenges.contains(CUSTOM_CHALLENGE));*/
}
/** Convenience overload: verify a request/response (non-oneway) back-channel send. */
private void verifyBackChannelSend(Conduit backChannel,
                                   Message outMsg,
                                   int status) throws Exception {
    final boolean oneway = false;
    verifyBackChannelSend(backChannel, outMsg, status, oneway);
}
/**
 * Exercise a back-channel send: write PAYLOAD through the conduit's output
 * stream, flush (which pushes status/headers to the mocked response), and
 * check oneway vs. request/response handling.
 */
private void verifyBackChannelSend(Conduit backChannel,
                                   Message outMsg,
                                   int status,
                                   boolean oneway) throws Exception {
    outMsg.getExchange().setOneWay(oneway);

    assertTrue("unexpected back channel type",
               backChannel instanceof JettyHTTPDestination.BackChannelConduit);
    assertTrue("unexpected content formats",
               outMsg.getContentFormats().contains(OutputStream.class));
    OutputStream responseOS = outMsg.getContent(OutputStream.class);
    assertNotNull("expected output stream", responseOS);
    assertTrue("unexpected output stream type",
               responseOS instanceof AbstractWrappedOutputStream);

    outMsg.put(Message.RESPONSE_CODE, status);
    responseOS.write(PAYLOAD.getBytes());

    setUpResponseHeaders(outMsg);

    // flush triggers the actual write-through to the mocked HttpServletResponse
    responseOS.flush();

    assertEquals("unexpected status",
                 status,
                 response.getStatus());
    /*if (status == 500) {
        assertEquals("unexpected status message",
                     "Internal Server Error",
                     response.getReason());
    }*/
    verifyResponseHeaders(outMsg);

    if (oneway) {
        assertNull("unexpected HTTP response",
                   outMsg.get(AbstractHTTPDestination.HTTP_RESPONSE));
    } else {
        assertNotNull("expected HTTP response",
                      outMsg.get(AbstractHTTPDestination.HTTP_RESPONSE));
        responseOS.close();
    }
}
/** Build an endpoint reference for the given path under the NOWHERE base address. */
static EndpointReferenceType getEPR(String s) {
    return EndpointReferenceUtils.getEndpointReference(NOWHERE + s);
}
/**
 * Test subclass that widens retrieveFromContinuation() to public so tests can
 * drive continuation retrieval directly on the destination.
 */
private static class TestJettyDestination extends JettyHTTPDestination {
    TestJettyDestination(Bus bus,
                         DestinationRegistry registry,
                         EndpointInfo endpointInfo,
                         JettyHTTPServerEngineFactory serverEngineFactory) throws IOException {
        super(bus, registry, endpointInfo, serverEngineFactory);
    }

    @Override
    public Message retrieveFromContinuation(HttpServletRequest request) {
        return super.retrieveFromContinuation(request);
    }
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. The ASF licenses this file to You
* under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. For additional information regarding
* copyright in this work, please see the NOTICE file in the top level
* directory of this distribution.
*/
package org.apache.roller.weblogger.business.startup;
import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Properties;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import org.apache.roller.weblogger.business.DatabaseProvider;
/**
* Handles the install/upgrade of the Roller Weblogger database when the user
* has configured their installation type to 'auto'.
*/
public class DatabaseInstaller {
private static Log log = LogFactory.getLog(DatabaseInstaller.class);

private final DatabaseProvider db;             // source of JDBC connections
private final DatabaseScriptProvider scripts;  // source of create/migration SQL scripts
private final String version;                  // target version read from roller-version.properties
private List<String> messages = new ArrayList<>();  // user-visible install/upgrade log

// the name of the property which holds the dbversion value
private static final String DBVERSION_PROP = "roller.database.version";
/**
 * Create an installer bound to the given connection and script providers.
 * Reads the target Roller version from roller-version.properties on the
 * classpath; falls back to "UNKNOWN" when the resource is missing.
 *
 * Fixes: the properties stream was never closed, and a missing resource
 * caused an NPE inside Properties.load() instead of the intended log+fallback.
 */
public DatabaseInstaller(DatabaseProvider dbProvider, DatabaseScriptProvider scriptProvider) {
    db = dbProvider;
    scripts = scriptProvider;

    Properties props = new Properties();
    try (InputStream in = getClass().getResourceAsStream("/roller-version.properties")) {
        if (in != null) {
            props.load(in);
        } else {
            log.error("roller-version.properties not found");
        }
    } catch (IOException e) {
        log.error("roller-version.properties not found", e);
    }

    version = props.getProperty("ro.version", "UNKNOWN");
}
/**
 * Determine if the core Roller tables need to be created: returns false when
 * the key tables already exist, true otherwise.
 *
 * @throws RuntimeException if table metadata cannot be read
 */
public boolean isCreationRequired() {
    Connection con = null;
    try {
        con = db.getConnection();

        // just check for a couple key Roller tables
        // roller_user table called rolleruser before Roller 5.1
        if (tableExists(con, "userrole") && (tableExists(con, "roller_user") || tableExists(con, "rolleruser"))) {
            return false;
        }

    } catch (Exception e) {
        throw new RuntimeException("Error checking for tables", e);
    } finally {
        try {
            if (con != null) {
                con.close();
            }
        } catch (Exception ignored) {}
    }

    return true;
}
/**
 * Determine if database schema needs to be upgraded.
 *
 * Side effect: when no version is stored (fresh database) this records the
 * current application version in the database and reports no upgrade needed.
 */
public boolean isUpgradeRequired() {
    int desiredVersion = parseVersionString(version);
    int databaseVersion;
    try {
        databaseVersion = getDatabaseVersion();
    } catch (StartupException ex) {
        throw new RuntimeException(ex);
    }

    // if dbversion is unset then assume a new install, otherwise compare
    if (databaseVersion < 0) {
        // if this is a fresh db then we need to set the database version
        Connection con = null;
        try {
            con = db.getConnection();
            setDatabaseVersion(con, version);
        } catch (Exception ioe) {
            errorMessage("ERROR setting database version");
        } finally {
            try {
                if (con != null) {
                    con.close();
                }
            } catch (Exception ignored) {
            }
        }
        return false;
    } else {
        return databaseVersion < desiredVersion;
    }
}
/**
 * Status messages recorded during create/upgrade.
 * Note: returns the live internal list, not a defensive copy.
 */
public List<String> getMessages() {
    return messages;
}
/** Log an error-level status message and record it for the UI. */
private void errorMessage(String msg) {
    log.error(msg);
    messages.add(msg);
}
/** Log an error-level status message with its cause and record it for the UI. */
private void errorMessage(String msg, Throwable t) {
    log.error(msg, t);
    messages.add(msg);
}
/** Trace-log a progress message and record it for the UI. */
private void successMessage(String msg) {
    log.trace(msg);
    messages.add(msg);
}
/**
 * Create database tables by running the per-database "createdb.sql" script,
 * then record the current application version in roller_properties.
 *
 * @throws StartupException if the script cannot be read or fails to run
 */
public void createDatabase() throws StartupException {
    log.info("Creating Roller Weblogger database tables.");

    Connection con = null;
    SQLScriptRunner create = null;
    try {
        con = db.getConnection();
        String handle = getDatabaseHandle(con);
        create = new SQLScriptRunner(scripts.getDatabaseScript(handle + "/createdb.sql"));
        create.runScript(con, true);
        messages.addAll(create.getMessages());
        setDatabaseVersion(con, version);
    } catch (SQLException sqle) {
        log.error("ERROR running SQL in database creation script", sqle);
        // surface any partial script output before failing
        if (create != null) {
            messages.addAll(create.getMessages());
        }
        errorMessage("ERROR running SQL in database creation script");
        throw new StartupException("Error running sql script", sqle);
    } catch (Exception ioe) {
        log.error("ERROR running database creation script", ioe);
        if (create != null) {
            messages.addAll(create.getMessages());
        }
        errorMessage("ERROR reading/parsing database creation script");
        throw new StartupException("Error running SQL script", ioe);
    } finally {
        try {
            if (con != null) {
                con.close();
            }
        } catch (Exception ignored) {}
    }
}
/**
 * Upgrade database if the stored schema version is older than this build's
 * version, applying each migration step in sequence.
 *
 * Fixes: the SQLException from obtaining the connection was dropped; it is
 * now passed to StartupException as the cause.
 *
 * @param runScripts true to execute SQL migration scripts; false to run only
 *                   the Java-side data migrations
 * @throws StartupException if the version cannot be determined, is too old
 *                          to upgrade from, or any migration step fails
 */
public void upgradeDatabase(boolean runScripts) throws StartupException {

    int myVersion = parseVersionString(version);
    int dbversion = getDatabaseVersion();

    log.info("Database version = "+dbversion);
    log.info("Desired version = "+myVersion);

    Connection con = null;
    try {
        con = db.getConnection();
        if(dbversion < 0) {
            String msg = "Cannot upgrade database tables, Roller database version cannot be determined";
            errorMessage(msg);
            throw new StartupException(msg);
        } else if (dbversion < 310) {
            String msg = "Roller " + myVersion + " cannot upgrade from versions older than 3.10; " +
                    "try first upgrading to an earlier version of Roller.";
            errorMessage(msg);
            throw new StartupException(msg);
        } else if(dbversion >= myVersion) {
            log.info("Database is current, no upgrade needed");
            return;
        }

        log.info("Database is old, beginning upgrade to version "+myVersion);

        // iterate through each upgrade as needed
        // to add to the upgrade sequence simply add a new "if" statement
        // for whatever version needed and then define a new method upgradeXXX()
        if(dbversion < 400) {
            upgradeTo400(con, runScripts);
            dbversion = 400;
        }
        if(dbversion < 500) {
            upgradeTo500(con, runScripts);
            dbversion = 500;
        }
        if(dbversion < 510) {
            upgradeTo510(con, runScripts);
            dbversion = 510;
        }
        if(dbversion < 520) {
            upgradeTo520(con, runScripts);
            dbversion = 520;
        }
        if(dbversion < 610) {
            upgradeTo610(con, runScripts);
            dbversion = 610;
        }

        // make sure the database version is the exact version
        // we are upgrading too.
        updateDatabaseVersion(con, myVersion);

    } catch (SQLException e) {
        // preserve the underlying exception as the cause (was dropped before)
        throw new StartupException("ERROR obtaining connection", e);
    } finally {
        try {
            if (con != null) {
                con.close();
            }
        } catch (Exception ignored) {}
    }
}
/**
 * Upgrade database to Roller 4.0.0: runs the 310-to-400 SQL migration script
 * (when runScripts is true), then performs the Java-side data migrations —
 * populating category/folder parentid and path columns, merging the planet
 * groups 'all' and 'external', rewriting local planet subscription feed urls,
 * and converting comments to the content-type/plugins model.
 *
 * Fixes: the planet-subscription section previously reported the unrelated
 * "Comments successfully updated..." message on success.
 *
 * REVISIT: PreparedStatements/ResultSets opened here are never closed; kept
 * as-is to avoid altering long-standing upgrade behavior.
 */
private void upgradeTo400(Connection con, boolean runScripts) throws StartupException {

    successMessage("Doing upgrade to 400 ...");

    // first we need to run upgrade scripts
    SQLScriptRunner runner = null;
    try {
        if (runScripts) {
            String handle = getDatabaseHandle(con);
            String scriptPath = handle + "/310-to-400-migration.sql";
            successMessage("Running database upgrade script: "+scriptPath);
            runner = new SQLScriptRunner(scripts.getDatabaseScript(scriptPath));
            runner.runScript(con, true);
            messages.addAll(runner.getMessages());
        }
    } catch(Exception ex) {
        log.error("ERROR running 400 database upgrade script", ex);
        if (runner != null) {
            messages.addAll(runner.getMessages());
        }
        errorMessage("Problem upgrading database to version 400", ex);
        throw new StartupException("Problem upgrading database to version 400", ex);
    }

    // now upgrade hierarchical objects data model
    try {
        successMessage("Populating parentid columns for weblogcategory and folder tables");

        // Populate parentid in weblogcategory and folder tables.
        //
        // We'd like to do something like the below, but few databases
        // support multiple table udpates, which are part of SQL-99
        //
        // update weblogcategory, weblogcategoryassoc
        //   set weblogcategory.parentid = weblogcategoryassoc.ancestorid
        //   where
        //      weblogcategory.id = weblogcategoryassoc.categoryid
        //      and weblogcategoryassoc.relation = 'PARENT';
        //
        // update folder,folderassoc
        //   set folder.parentid = folderassoc.ancestorid
        //   where
        //      folder.id = folderassoc.folderid
        //      and folderassoc.relation = 'PARENT';

        PreparedStatement selectParents = con.prepareStatement(
                "select categoryid, ancestorid from weblogcategoryassoc where relation='PARENT'");
        PreparedStatement updateParent = con.prepareStatement(
                "update weblogcategory set parentid=? where id=?");
        ResultSet parentSet = selectParents.executeQuery();
        while (parentSet.next()) {
            String categoryid = parentSet.getString(1);
            String parentid = parentSet.getString(2);
            updateParent.clearParameters();
            updateParent.setString( 1, parentid);
            updateParent.setString( 2, categoryid);
            updateParent.executeUpdate();
        }

        selectParents = con.prepareStatement(
                "select folderid, ancestorid from folderassoc where relation='PARENT'");
        updateParent = con.prepareStatement(
                "update folder set parentid=? where id=?");
        parentSet = selectParents.executeQuery();
        while (parentSet.next()) {
            String folderid = parentSet.getString(1);
            String parentid = parentSet.getString(2);
            updateParent.clearParameters();
            updateParent.setString( 1, parentid);
            updateParent.setString( 2, folderid);
            updateParent.executeUpdate();
        }

        if (!con.getAutoCommit()) {
            con.commit();
        }

        successMessage("Done populating parentid columns.");

    } catch (Exception e) {
        errorMessage("Problem upgrading database to version 320", e);
        throw new StartupException("Problem upgrading database to version 320", e);
    }

    try {
        successMessage("Populating path columns for weblogcategory and folder tables.");

        // Populate path in weblogcategory and folder tables.
        //
        // It would be nice if there was a simple sql solution for doing
        // this, but sadly the only real way to do it is through brute
        // force walking the hierarchical trees. Luckily, it seems that
        // most people don't create multi-level hierarchies, so hopefully
        // this won't be too bad

        // set path to '/' for nodes with no parents (aka root nodes)
        PreparedStatement setRootPaths = con.prepareStatement(
                "update weblogcategory set path = '/' where parentid is NULL");
        setRootPaths.clearParameters();
        setRootPaths.executeUpdate();

        // select all nodes whose parent has no parent (aka 1st level nodes)
        PreparedStatement selectL1Children = con.prepareStatement(
                "select f.id, f.name from weblogcategory f, weblogcategory p "+
                "where f.parentid = p.id and p.parentid is NULL");
        // update L1 nodes with their path (/<name>)
        PreparedStatement updateL1Children = con.prepareStatement(
                "update weblogcategory set path=? where id=?");
        ResultSet L1Set = selectL1Children.executeQuery();
        while (L1Set.next()) {
            String id = L1Set.getString(1);
            String name = L1Set.getString(2);
            updateL1Children.clearParameters();
            updateL1Children.setString( 1, "/"+name);
            updateL1Children.setString( 2, id);
            updateL1Children.executeUpdate();
        }

        // now for the complicated part =(
        // we need to keep iterating over L2, L3, etc nodes and setting
        // their path until all nodes have been updated.

        // select all nodes whose parent path has been set, excluding L1 nodes
        PreparedStatement selectLxChildren = con.prepareStatement(
                "select f.id, f.name, p.path from weblogcategory f, weblogcategory p "+
                "where f.parentid = p.id and p.path <> '/' "+
                "and p.path is not NULL and f.path is NULL");
        // update Lx nodes with their path (<parentPath>/<name>)
        PreparedStatement updateLxChildren = con.prepareStatement(
                "update weblogcategory set path=? where id=?");

        // this loop allows us to run this part of the upgrade process as
        // long as is necessary based on the depth of the hierarchy, and
        // we use the do/while construct to ensure it's run at least once
        int catNumCounted = 0;
        do {
            log.debug("Doing pass over Lx children for categories");

            // reset count for each iteration of outer loop
            catNumCounted = 0;

            ResultSet LxSet = selectLxChildren.executeQuery();
            while (LxSet.next()) {
                String id = LxSet.getString(1);
                String name = LxSet.getString(2);
                String parentPath = LxSet.getString(3);
                updateLxChildren.clearParameters();
                updateLxChildren.setString( 1, parentPath+"/"+name);
                updateLxChildren.setString( 2, id);
                updateLxChildren.executeUpdate();

                // count the updated rows
                catNumCounted++;
            }

            log.debug("Updated "+catNumCounted+" Lx category paths");
        } while(catNumCounted > 0);

        // set path to '/' for nodes with no parents (aka root nodes)
        setRootPaths = con.prepareStatement(
                "update folder set path = '/' where parentid is NULL");
        setRootPaths.clearParameters();
        setRootPaths.executeUpdate();

        // select all nodes whose parent has no parent (aka 1st level nodes)
        selectL1Children = con.prepareStatement(
                "select f.id, f.name from folder f, folder p "+
                "where f.parentid = p.id and p.parentid is NULL");
        // update L1 nodes with their path (/<name>)
        updateL1Children = con.prepareStatement(
                "update folder set path=? where id=?");
        L1Set = selectL1Children.executeQuery();
        while (L1Set.next()) {
            String id = L1Set.getString(1);
            String name = L1Set.getString(2);
            updateL1Children.clearParameters();
            updateL1Children.setString( 1, "/"+name);
            updateL1Children.setString( 2, id);
            updateL1Children.executeUpdate();
        }

        // now for the complicated part =(
        // we need to keep iterating over L2, L3, etc nodes and setting
        // their path until all nodes have been updated.

        // select all nodes whose parent path has been set, excluding L1 nodes
        selectLxChildren = con.prepareStatement(
                "select f.id, f.name, p.path from folder f, folder p "+
                "where f.parentid = p.id and p.path <> '/' "+
                "and p.path is not NULL and f.path is NULL");
        // update Lx nodes with their path (/<name>)
        updateLxChildren = con.prepareStatement(
                "update folder set path=? where id=?");

        // this loop allows us to run this part of the upgrade process as
        // long as is necessary based on the depth of the hierarchy, and
        // we use the do/while construct to ensure it's run at least once
        int folderNumUpdated = 0;
        do {
            log.debug("Doing pass over Lx children for folders");

            // reset count for each iteration of outer loop
            folderNumUpdated = 0;

            ResultSet LxSet = selectLxChildren.executeQuery();
            while (LxSet.next()) {
                String id = LxSet.getString(1);
                String name = LxSet.getString(2);
                String parentPath = LxSet.getString(3);
                updateLxChildren.clearParameters();
                updateLxChildren.setString( 1, parentPath+"/"+name);
                updateLxChildren.setString( 2, id);
                updateLxChildren.executeUpdate();

                // count the updated rows
                folderNumUpdated++;
            }

            log.debug("Updated "+folderNumUpdated+" Lx folder paths");
        } while(folderNumUpdated > 0);

        if (!con.getAutoCommit()) {
            con.commit();
        }

        successMessage("Done populating path columns.");

    } catch (SQLException e) {
        log.error("Problem upgrading database to version 320", e);
        throw new StartupException("Problem upgrading database to version 320", e);
    }

    // 4.0 changes the planet data model a bit, so we need to clean that up
    try {
        successMessage("Merging planet groups 'all' and 'external'");

        // Move all subscriptions in the planet group 'external' to group 'all'
        String allGroupId = null;
        PreparedStatement selectAllGroupId = con.prepareStatement(
                "select id from rag_group where handle = 'all'");
        ResultSet rs = selectAllGroupId.executeQuery();
        if (rs.next()) {
            allGroupId = rs.getString(1);
        }
        String externalGroupId = null;
        PreparedStatement selectExternalGroupId = con.prepareStatement(
                "select id from rag_group where handle = 'external'");
        rs = selectExternalGroupId.executeQuery();
        if (rs.next()) {
            externalGroupId = rs.getString(1);
        }
        // we only need to merge if both of those groups already existed
        if(allGroupId != null && externalGroupId != null) {
            PreparedStatement updateGroupSubs = con.prepareStatement(
                    "update rag_group_subscription set group_id = ? where group_id = ?");
            updateGroupSubs.clearParameters();
            updateGroupSubs.setString( 1, allGroupId);
            updateGroupSubs.setString( 2, externalGroupId);
            updateGroupSubs.executeUpdate();

            // we no longer need the group 'external'
            PreparedStatement deleteExternalGroup = con.prepareStatement(
                    "delete from rag_group where handle = 'external'");
            deleteExternalGroup.executeUpdate();

        // if we only have group 'external' then just rename it to 'all'
        } else if(allGroupId == null && externalGroupId != null) {
            // rename 'external' to 'all'
            PreparedStatement renameExternalGroup = con.prepareStatement(
                    "update rag_group set handle = 'all' where handle = 'external'");
            renameExternalGroup.executeUpdate();
        }

        if (!con.getAutoCommit()) {
            con.commit();
        }

        successMessage("Planet group 'external' merged into group 'all'.");

    } catch (Exception e) {
        errorMessage("Problem upgrading database to version 400", e);
        throw new StartupException("Problem upgrading database to version 400", e);
    }

    // update local planet subscriptions to use new local feed format
    try {
        successMessage("Upgrading local planet subscription feeds to new feed url format");

        // need to start by looking up absolute site url
        PreparedStatement selectAbsUrl =
                con.prepareStatement("select value from roller_properties where name = 'site.absoluteurl'");
        String absUrl = null;
        ResultSet rs = selectAbsUrl.executeQuery();
        if(rs.next()) {
            absUrl = rs.getString(1);
        }

        if(absUrl != null && absUrl.length() > 0) {
            PreparedStatement selectSubs =
                    con.prepareStatement("select id,feed_url,author from rag_subscription");

            PreparedStatement updateSubUrl =
                    con.prepareStatement("update rag_subscription set last_updated=last_updated, feed_url = ? where id = ?");

            ResultSet rset = selectSubs.executeQuery();
            while (rset.next()) {
                String id = rset.getString(1);
                String feed_url = rset.getString(2);
                String handle = rset.getString(3);

                // only work on local feed urls
                if (feed_url.startsWith(absUrl)) {
                    // update feed_url to 'weblogger:<handle>'
                    updateSubUrl.clearParameters();
                    updateSubUrl.setString( 1, "weblogger:"+handle);
                    updateSubUrl.setString( 2, id);
                    updateSubUrl.executeUpdate();
                }
            }
        }

        if (!con.getAutoCommit()) {
            con.commit();
        }

        // FIX: previously reported the comment-plugins message here by mistake
        successMessage("Local planet subscription feeds updated to new feed url format.");

    } catch (Exception e) {
        errorMessage("Problem upgrading database to version 400", e);
        throw new StartupException("Problem upgrading database to version 400", e);
    }

    // upgrade comments to use new plugin mechanism
    try {
        successMessage("Upgrading existing comments with content-type & plugins");

        // look in db and see if comment autoformatting is enabled
        boolean autoformatEnabled = false;
        String autoformat = null;
        PreparedStatement selectIsAutoformtEnabled = con.prepareStatement(
                "select value from roller_properties where name = 'users.comments.autoformat'");
        ResultSet rs = selectIsAutoformtEnabled.executeQuery();
        if (rs.next()) {
            autoformat = rs.getString(1);
            if(autoformat != null && "true".equals(autoformat)) {
                autoformatEnabled = true;
            }
        }

        // look in db and see if comment html escaping is enabled
        boolean htmlEnabled = false;
        String escapehtml = null;
        PreparedStatement selectIsEscapehtmlEnabled = con.prepareStatement(
                "select value from roller_properties where name = 'users.comments.escapehtml'");
        ResultSet rs1 = selectIsEscapehtmlEnabled.executeQuery();
        if (rs1.next()) {
            escapehtml = rs1.getString(1);
            // NOTE: we allow html only when html escaping is OFF
            if(escapehtml != null && !"true".equals(escapehtml)) {
                htmlEnabled = true;
            }
        }

        // first lets set the new 'users.comments.htmlenabled' property
        PreparedStatement addCommentHtmlProp = con.prepareStatement("insert into roller_properties(name,value) values(?,?)");
        addCommentHtmlProp.clearParameters();
        addCommentHtmlProp.setString(1, "users.comments.htmlenabled");
        if(htmlEnabled) {
            addCommentHtmlProp.setString(2, "true");
        } else {
            addCommentHtmlProp.setString(2, "false");
        }
        addCommentHtmlProp.executeUpdate();

        // determine content-type for existing comments
        String contentType = "text/plain";
        if(htmlEnabled) {
            contentType = "text/html";
        }

        // determine plugins for existing comments
        String plugins = "";
        if(htmlEnabled && autoformatEnabled) {
            plugins = "HTMLSubset,AutoFormat";
        } else if(htmlEnabled) {
            plugins = "HTMLSubset";
        } else if(autoformatEnabled) {
            plugins = "AutoFormat";
        }

        // set new comment plugins configuration property 'users.comments.plugins'
        PreparedStatement addCommentPluginsProp =
                con.prepareStatement("insert into roller_properties(name,value) values(?,?)");
        addCommentPluginsProp.clearParameters();
        addCommentPluginsProp.setString(1, "users.comments.plugins");
        addCommentPluginsProp.setString(2, plugins);
        addCommentPluginsProp.executeUpdate();

        // set content-type for all existing comments
        PreparedStatement updateCommentsContentType =
                con.prepareStatement("update roller_comment set posttime=posttime, contenttype = ?");
        updateCommentsContentType.clearParameters();
        updateCommentsContentType.setString(1, contentType);
        updateCommentsContentType.executeUpdate();

        // set plugins for all existing comments
        PreparedStatement updateCommentsPlugins =
                con.prepareStatement("update roller_comment set posttime=posttime, plugins = ?");
        updateCommentsPlugins.clearParameters();
        updateCommentsPlugins.setString(1, plugins);
        updateCommentsPlugins.executeUpdate();

        if (!con.getAutoCommit()) {
            con.commit();
        }

        successMessage("Comments successfully updated to use new comment plugins.");

    } catch (Exception e) {
        errorMessage("Problem upgrading database to version 400", e);
        throw new StartupException("Problem upgrading database to version 400", e);
    }

    // finally, upgrade db version string to 400
    updateDatabaseVersion(con, 400);
}
/** Upgrade database to Roller 5.0 via the single 400-to-500 migration script. */
private void upgradeTo500(Connection con, boolean runScripts) throws StartupException {
    simpleUpgrade(con, 400, 500, runScripts);
}
/** Upgrade database to Roller 5.1 via the single 500-to-510 migration script. */
private void upgradeTo510(Connection con, boolean runScripts) throws StartupException {
    simpleUpgrade(con, 500, 510, runScripts);
}
/** Upgrade database to Roller 5.2 via the single 510-to-520 migration script. */
private void upgradeTo520(Connection con, boolean runScripts) throws StartupException {
    simpleUpgrade(con, 510, 520, runScripts);
}
/** Upgrade database to Roller 6.1 via the single 520-to-610 migration script. */
private void upgradeTo610(Connection con, boolean runScripts) throws StartupException {
    simpleUpgrade(con, 520, 610, runScripts);
}
/**
 * Simple upgrade using a single SQL migration script named
 * "&lt;handle&gt;/&lt;from&gt;-to-&lt;to&gt;-migration.sql".
 *
 * @param con         open connection to run the script on
 * @param fromVersion source schema version (script name prefix)
 * @param toVersion   target schema version (script name suffix)
 * @param runScripts  false to skip script execution entirely
 * @throws StartupException if the script cannot be read or fails to run
 */
private void simpleUpgrade(Connection con, int fromVersion, int toVersion, boolean runScripts) throws StartupException {
    // first we need to run upgrade scripts
    SQLScriptRunner runner = null;
    try {
        if (runScripts) {
            String handle = getDatabaseHandle(con);
            String scriptPath = handle + "/"+fromVersion+"-to-"+toVersion+"-migration.sql";
            successMessage("Running database upgrade script: "+scriptPath);
            runner = new SQLScriptRunner(scripts.getDatabaseScript(scriptPath));
            runner.runScript(con, true);
            messages.addAll(runner.getMessages());
        }
    } catch(Exception ex) {
        log.error("ERROR running "+fromVersion+"->"+toVersion+" database upgrade script", ex);
        // surface any partial script output before failing
        if (runner != null) {
            messages.addAll(runner.getMessages());
        }
        errorMessage("Problem upgrading database to version "+toVersion, ex);
        throw new StartupException("Problem upgrading database to version "+toVersion, ex);
    }
}
/**
 * Use database product name to get the database script directory name.
 *
 * Fixes: lower-cases the product name once (was recomputed per comparison)
 * and uses Locale.ROOT so matching is not affected by the JVM default locale
 * (e.g. Turkish dotless-i).
 *
 * @param con open connection whose metadata identifies the database product
 * @return script directory handle, e.g. "mysql" (default), "derby",
 *         "hsqldb", "postgresql", "oracle", "mssql" or "db2"
 */
public String getDatabaseHandle(Connection con) throws SQLException {
    String productName = con.getMetaData().getDatabaseProductName().toLowerCase(Locale.ROOT);
    String handle = "mysql";
    if (productName.contains("mysql")) {
        handle = "mysql";
    } else if (productName.contains("derby")) {
        handle = "derby";
    } else if (productName.contains("hsql")) {
        handle = "hsqldb";
    } else if (productName.contains("postgres")) {
        handle = "postgresql";
    } else if (productName.contains("oracle")) {
        handle = "oracle";
    } else if (productName.contains("microsoft")) {
        handle = "mssql";
    } else if (productName.contains("db2")) {
        handle = "db2";
    }
    return handle;
}
/**
 * Return true if named table exists in database.
 *
 * Scans all tables reported by the driver rather than filtering by name in
 * {@code getTables}, so the check is case-insensitive regardless of how the
 * database stores identifiers.
 *
 * @param con       open connection to inspect
 * @param tableName table name to look for (case-insensitive match)
 * @return true if a table with that name exists
 * @throws SQLException if the database metadata cannot be read
 */
private boolean tableExists(Connection con, String tableName) throws SQLException {
    // try-with-resources: the original leaked the ResultSet on every call.
    try (ResultSet rs = con.getMetaData().getTables(null, null, "%", null)) {
        while (rs.next()) {
            // equalsIgnoreCase already ignores case; the original's extra
            // toLowerCase() on the right-hand side was redundant.
            if (tableName.equalsIgnoreCase(rs.getString("TABLE_NAME"))) {
                return true;
            }
        }
    }
    return false;
}
/**
 * Look up the current schema version stored in the roller_properties table.
 *
 * @return the version as an int (e.g. 520); 120 when roller_properties
 *         exists and is non-empty but has no version row (pre-1.3 install);
 *         -1 when the version cannot be determined at all
 * @throws StartupException declared for API compatibility; lookup failures
 *         are logged and reported as -1 rather than thrown
 */
private int getDatabaseVersion() throws StartupException {
    int dbversion = -1;
    // try-with-resources closes rs, stmt and con in reverse order; the
    // original only closed the connection in a finally block.
    try (Connection con = db.getConnection();
         Statement stmt = con.createStatement();
         ResultSet rs = stmt.executeQuery(
             "select value from roller_properties where name = '"+DBVERSION_PROP+"'")) {
        if (rs.next()) {
            dbversion = Integer.parseInt(rs.getString(1));
        } else {
            // tough to know if this is an upgrade with no db version :/
            // however, if roller_properties is not empty then we at least
            // know someone is upgrading from 1.2.x
            try (ResultSet countRs =
                     stmt.executeQuery("select count(*) from roller_properties")) {
                if (countRs.next() && countRs.getInt(1) > 0) {
                    dbversion = 120;
                }
            }
        }
    } catch (Exception e) {
        // Deliberately best-effort: log and fall through to -1 so startup
        // can decide what to do, exactly as the original did.
        log.error("Couldn't lookup current database version", e);
    }
    return dbversion;
}
/**
 * Parse a dotted version string (e.g. "5.1.0") into the int form used in
 * roller_properties (e.g. 510).
 *
 * NOTE: this assumes a maximum of 3 digits for the version number, so if we
 * get to 10.0 then we'll need to upgrade this.
 *
 * @param vstring version string; non-digits are ignored
 * @return normalized 3-digit version (two-digit input like "5.1" becomes
 *         510); 0 when the string contains no digits
 */
private int parseVersionString(String vstring) {
    // Strip all non-digits. (The original first stripped dots explicitly,
    // but \D already removes them, so one pass suffices.)
    vstring = vstring.replaceAll("\\D", "");
    if (vstring.length() > 3) {
        vstring = vstring.substring(0, 3);
    }
    int myversion = 0;
    try {
        int parsed = Integer.parseInt(vstring);
        // Two-digit values like 51 mean "5.1" -> normalize to 510.
        myversion = parsed < 100 ? parsed * 10 : parsed;
    } catch (NumberFormatException ignored) {
        // No digits at all (e.g. empty string): report version 0, as before.
    }
    return myversion;
}
/**
 * Insert a new database.version property.
 * This should only be called once for new installations.
 *
 * @param con     open connection to the Roller database
 * @param version dotted version string (e.g. "6.1.0"), normalized via
 *                {@link #parseVersionString} before being stored
 * @throws StartupException if the insert fails
 */
private void setDatabaseVersion(Connection con, String version)
throws StartupException {
setDatabaseVersion(con, parseVersionString(version));
}
/**
 * Insert a new database.version property.
 * This should only be called once for new installations.
 *
 * @param con     open connection to the Roller database
 * @param version numeric version to store (e.g. 610 for Roller 6.1)
 * @throws StartupException if the insert fails
 */
private void setDatabaseVersion(Connection con, int version)
        throws StartupException {
    try (PreparedStatement stmt =
             con.prepareStatement("insert into roller_properties values(?,?)")) {
        stmt.setString(1, DBVERSION_PROP);
        stmt.setString(2, String.valueOf(version));
        stmt.executeUpdate();
        // Fixed typo in the original message ("verstion").
        log.debug("Set database version to "+version);
    } catch(SQLException se) {
        throw new StartupException("Error setting database version.", se);
    }
}
/**
 * Update the existing database.version property.
 *
 * @param con     open connection to the Roller database
 * @param version numeric version to store (e.g. 610 for Roller 6.1)
 * @throws StartupException if the update fails
 */
private void updateDatabaseVersion(Connection con, int version)
        throws StartupException {
    try (PreparedStatement stmt =
             con.prepareStatement("update roller_properties set value = ? where name = ?")) {
        stmt.setString(1, String.valueOf(version));
        stmt.setString(2, DBVERSION_PROP);
        stmt.executeUpdate();
        // Fixed typo in the original message ("verstion").
        log.debug("Updated database version to "+version);
    } catch(SQLException se) {
        throw new StartupException("Error setting database version.", se);
    }
}
}
|
googleapis/google-cloud-java | 35,660 | java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/RankingRecord.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1alpha/rank_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1alpha;
/**
*
*
* <pre>
* Record message for
* [RankService.Rank][google.cloud.discoveryengine.v1alpha.RankService.Rank]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.RankingRecord}
*/
public final class RankingRecord extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.RankingRecord)
RankingRecordOrBuilder {
private static final long serialVersionUID = 0L;
// Use RankingRecord.newBuilder() to construct.
private RankingRecord(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RankingRecord() {
id_ = "";
title_ = "";
content_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RankingRecord();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_RankingRecord_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_RankingRecord_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.RankingRecord.class,
com.google.cloud.discoveryengine.v1alpha.RankingRecord.Builder.class);
}
public static final int ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object id_ = "";
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The id.
*/
@java.lang.Override
public java.lang.String getId() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
}
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The bytes for id.
*/
@java.lang.Override
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TITLE_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object title_ = "";
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return The title.
*/
@java.lang.Override
public java.lang.String getTitle() {
java.lang.Object ref = title_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
title_ = s;
return s;
}
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return The bytes for title.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTitleBytes() {
java.lang.Object ref = title_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
title_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONTENT_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object content_ = "";
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return The content.
*/
@java.lang.Override
public java.lang.String getContent() {
java.lang.Object ref = content_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
content_ = s;
return s;
}
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return The bytes for content.
*/
@java.lang.Override
public com.google.protobuf.ByteString getContentBytes() {
java.lang.Object ref = content_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
content_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SCORE_FIELD_NUMBER = 4;
private float score_ = 0F;
/**
*
*
* <pre>
* The score of this record based on the given query and selected model.
* </pre>
*
* <code>float score = 4;</code>
*
* @return The score.
*/
@java.lang.Override
public float getScore() {
return score_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, id_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(title_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, title_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, content_);
}
if (java.lang.Float.floatToRawIntBits(score_) != 0) {
output.writeFloat(4, score_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(id_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, id_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(title_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, title_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(content_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, content_);
}
if (java.lang.Float.floatToRawIntBits(score_) != 0) {
size += com.google.protobuf.CodedOutputStream.computeFloatSize(4, score_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.RankingRecord)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1alpha.RankingRecord other =
(com.google.cloud.discoveryengine.v1alpha.RankingRecord) obj;
if (!getId().equals(other.getId())) return false;
if (!getTitle().equals(other.getTitle())) return false;
if (!getContent().equals(other.getContent())) return false;
if (java.lang.Float.floatToIntBits(getScore())
!= java.lang.Float.floatToIntBits(other.getScore())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + ID_FIELD_NUMBER;
hash = (53 * hash) + getId().hashCode();
hash = (37 * hash) + TITLE_FIELD_NUMBER;
hash = (53 * hash) + getTitle().hashCode();
hash = (37 * hash) + CONTENT_FIELD_NUMBER;
hash = (53 * hash) + getContent().hashCode();
hash = (37 * hash) + SCORE_FIELD_NUMBER;
hash = (53 * hash) + java.lang.Float.floatToIntBits(getScore());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1alpha.RankingRecord parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1alpha.RankingRecord prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Record message for
* [RankService.Rank][google.cloud.discoveryengine.v1alpha.RankService.Rank]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1alpha.RankingRecord}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.RankingRecord)
com.google.cloud.discoveryengine.v1alpha.RankingRecordOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1alpha.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_RankingRecord_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1alpha.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_RankingRecord_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1alpha.RankingRecord.class,
com.google.cloud.discoveryengine.v1alpha.RankingRecord.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1alpha.RankingRecord.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
id_ = "";
title_ = "";
content_ = "";
score_ = 0F;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1alpha.RankServiceProto
.internal_static_google_cloud_discoveryengine_v1alpha_RankingRecord_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.RankingRecord getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1alpha.RankingRecord.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.RankingRecord build() {
com.google.cloud.discoveryengine.v1alpha.RankingRecord result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1alpha.RankingRecord buildPartial() {
com.google.cloud.discoveryengine.v1alpha.RankingRecord result =
new com.google.cloud.discoveryengine.v1alpha.RankingRecord(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.discoveryengine.v1alpha.RankingRecord result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.id_ = id_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.title_ = title_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.content_ = content_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.score_ = score_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1alpha.RankingRecord) {
return mergeFrom((com.google.cloud.discoveryengine.v1alpha.RankingRecord) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.RankingRecord other) {
if (other == com.google.cloud.discoveryengine.v1alpha.RankingRecord.getDefaultInstance())
return this;
if (!other.getId().isEmpty()) {
id_ = other.id_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getTitle().isEmpty()) {
title_ = other.title_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getContent().isEmpty()) {
content_ = other.content_;
bitField0_ |= 0x00000004;
onChanged();
}
if (other.getScore() != 0F) {
setScore(other.getScore());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
id_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
title_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
content_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
case 37:
{
score_ = input.readFloat();
bitField0_ |= 0x00000008;
break;
} // case 37
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object id_ = "";
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The id.
*/
public java.lang.String getId() {
java.lang.Object ref = id_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
id_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return The bytes for id.
*/
public com.google.protobuf.ByteString getIdBytes() {
java.lang.Object ref = id_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
id_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @param value The id to set.
* @return This builder for chaining.
*/
public Builder setId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearId() {
id_ = getDefaultInstance().getId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The unique ID to represent the record.
* </pre>
*
* <code>string id = 1;</code>
*
* @param value The bytes for id to set.
* @return This builder for chaining.
*/
public Builder setIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
id_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object title_ = "";
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return The title.
*/
public java.lang.String getTitle() {
java.lang.Object ref = title_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
title_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return The bytes for title.
*/
public com.google.protobuf.ByteString getTitleBytes() {
java.lang.Object ref = title_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
title_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @param value The title to set.
* @return This builder for chaining.
*/
public Builder setTitle(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
title_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearTitle() {
title_ = getDefaultInstance().getTitle();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* The title of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string title = 2;</code>
*
* @param value The bytes for title to set.
* @return This builder for chaining.
*/
public Builder setTitleBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
title_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object content_ = "";
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return The content.
*/
public java.lang.String getContent() {
java.lang.Object ref = content_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
content_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return The bytes for content.
*/
public com.google.protobuf.ByteString getContentBytes() {
java.lang.Object ref = content_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
content_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @param value The content to set.
* @return This builder for chaining.
*/
public Builder setContent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
content_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearContent() {
content_ = getDefaultInstance().getContent();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* The content of the record. Empty by default.
* At least one of
* [title][google.cloud.discoveryengine.v1alpha.RankingRecord.title] or
* [content][google.cloud.discoveryengine.v1alpha.RankingRecord.content]
* should be set otherwise an INVALID_ARGUMENT error is thrown.
* </pre>
*
* <code>string content = 3;</code>
*
* @param value The bytes for content to set.
* @return This builder for chaining.
*/
public Builder setContentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
content_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private float score_;
/**
*
*
* <pre>
* The score of this record based on the given query and selected model.
* </pre>
*
* <code>float score = 4;</code>
*
* @return The score.
*/
@java.lang.Override
public float getScore() {
return score_;
}
/**
*
*
* <pre>
* The score of this record based on the given query and selected model.
* </pre>
*
* <code>float score = 4;</code>
*
* @param value The score to set.
* @return This builder for chaining.
*/
public Builder setScore(float value) {
score_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The score of this record based on the given query and selected model.
* </pre>
*
* <code>float score = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearScore() {
bitField0_ = (bitField0_ & ~0x00000008);
score_ = 0F;
onChanged();
return this;
}
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Plain delegation; declared final so subclasses of generated builders cannot override.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Plain delegation; declared final so subclasses of generated builders cannot override.
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.RankingRecord)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.RankingRecord)
  // Singleton default instance: the canonical empty RankingRecord shared by all callers.
  private static final com.google.cloud.discoveryengine.v1alpha.RankingRecord DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.RankingRecord();
  }
  public static com.google.cloud.discoveryengine.v1alpha.RankingRecord getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. On any parse failure the partially built message is
  // attached to the thrown InvalidProtocolBufferException so callers can
  // inspect whatever was successfully decoded.
  private static final com.google.protobuf.Parser<RankingRecord> PARSER =
      new com.google.protobuf.AbstractParser<RankingRecord>() {
        @java.lang.Override
        public RankingRecord parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures so callers see a single exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  // Static accessor for the shared parser instance.
  public static com.google.protobuf.Parser<RankingRecord> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<RankingRecord> getParserForType() {
    // Instance-level accessor required by the Message interface; same shared parser.
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.discoveryengine.v1alpha.RankingRecord getDefaultInstanceForType() {
    // Instance-level accessor required by the Message interface; same shared default.
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,775 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/AllocationResourceStatusSpecificSKUAllocation.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
 *
 *
 * <pre>
 * Contains Properties set for the reservation.
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation}
 */
public final class AllocationResourceStatusSpecificSKUAllocation
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation)
    AllocationResourceStatusSpecificSKUAllocationOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use AllocationResourceStatusSpecificSKUAllocation.newBuilder() to construct.
  private AllocationResourceStatusSpecificSKUAllocation(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private AllocationResourceStatusSpecificSKUAllocation() {
    sourceInstanceTemplateId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AllocationResourceStatusSpecificSKUAllocation();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_AllocationResourceStatusSpecificSKUAllocation_descriptor;
  }
  // Routes reflective map-field access to the utilizations map (field 402495121).
  @SuppressWarnings({"rawtypes"})
  @java.lang.Override
  protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
      int number) {
    switch (number) {
      case 402495121:
        return internalGetUtilizations();
      default:
        throw new RuntimeException("Invalid map field number: " + number);
    }
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_AllocationResourceStatusSpecificSKUAllocation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation.class,
            com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation.Builder
                .class);
  }
  // Bit 0x00000001 records presence of the optional source_instance_template_id field.
  private int bitField0_;
  public static final int SOURCE_INSTANCE_TEMPLATE_ID_FIELD_NUMBER = 111196154;
  @SuppressWarnings("serial")
  private volatile java.lang.Object sourceInstanceTemplateId_ = "";
  /**
   *
   *
   * <pre>
   * ID of the instance template used to populate reservation properties.
   * </pre>
   *
   * <code>optional string source_instance_template_id = 111196154;</code>
   *
   * @return Whether the sourceInstanceTemplateId field is set.
   */
  @java.lang.Override
  public boolean hasSourceInstanceTemplateId() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * ID of the instance template used to populate reservation properties.
   * </pre>
   *
   * <code>optional string source_instance_template_id = 111196154;</code>
   *
   * @return The sourceInstanceTemplateId.
   */
  @java.lang.Override
  public java.lang.String getSourceInstanceTemplateId() {
    java.lang.Object ref = sourceInstanceTemplateId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Field still holds the parsed ByteString; decode once and cache the String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      sourceInstanceTemplateId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * ID of the instance template used to populate reservation properties.
   * </pre>
   *
   * <code>optional string source_instance_template_id = 111196154;</code>
   *
   * @return The bytes for sourceInstanceTemplateId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getSourceInstanceTemplateIdBytes() {
    java.lang.Object ref = sourceInstanceTemplateId_;
    if (ref instanceof java.lang.String) {
      // Encode once and cache the ByteString, mirroring the String-side caching above.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      sourceInstanceTemplateId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int UTILIZATIONS_FIELD_NUMBER = 402495121;
  // Prototype map entry describing the wire format of utilizations entries:
  // string key (default ""), int64 value (default 0).
  private static final class UtilizationsDefaultEntryHolder {
    static final com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> defaultEntry =
        com.google.protobuf.MapEntry.<java.lang.String, java.lang.Long>newDefaultInstance(
            com.google.cloud.compute.v1.Compute
                .internal_static_google_cloud_compute_v1_AllocationResourceStatusSpecificSKUAllocation_UtilizationsEntry_descriptor,
            com.google.protobuf.WireFormat.FieldType.STRING,
            "",
            com.google.protobuf.WireFormat.FieldType.INT64,
            0L);
  }
  @SuppressWarnings("serial")
  private com.google.protobuf.MapField<java.lang.String, java.lang.Long> utilizations_;
  private com.google.protobuf.MapField<java.lang.String, java.lang.Long> internalGetUtilizations() {
    // null means "never populated": serve a shared empty MapField instead of allocating.
    if (utilizations_ == null) {
      return com.google.protobuf.MapField.emptyMapField(
          UtilizationsDefaultEntryHolder.defaultEntry);
    }
    return utilizations_;
  }
  public int getUtilizationsCount() {
    return internalGetUtilizations().getMap().size();
  }
  /**
   *
   *
   * <pre>
   * Per service utilization breakdown. The Key is the Google Cloud managed service name.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
   */
  @java.lang.Override
  public boolean containsUtilizations(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    return internalGetUtilizations().getMap().containsKey(key);
  }
  /** Use {@link #getUtilizationsMap()} instead. */
  @java.lang.Override
  @java.lang.Deprecated
  public java.util.Map<java.lang.String, java.lang.Long> getUtilizations() {
    return getUtilizationsMap();
  }
  /**
   *
   *
   * <pre>
   * Per service utilization breakdown. The Key is the Google Cloud managed service name.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
   */
  @java.lang.Override
  public java.util.Map<java.lang.String, java.lang.Long> getUtilizationsMap() {
    return internalGetUtilizations().getMap();
  }
  /**
   *
   *
   * <pre>
   * Per service utilization breakdown. The Key is the Google Cloud managed service name.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
   */
  @java.lang.Override
  public long getUtilizationsOrDefault(java.lang.String key, long defaultValue) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.Long> map = internalGetUtilizations().getMap();
    return map.containsKey(key) ? map.get(key) : defaultValue;
  }
  /**
   *
   *
   * <pre>
   * Per service utilization breakdown. The Key is the Google Cloud managed service name.
   * </pre>
   *
   * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
   */
  @java.lang.Override
  public long getUtilizationsOrThrow(java.lang.String key) {
    if (key == null) {
      throw new NullPointerException("map key");
    }
    java.util.Map<java.lang.String, java.lang.Long> map = internalGetUtilizations().getMap();
    if (!map.containsKey(key)) {
      throw new java.lang.IllegalArgumentException();
    }
    return map.get(key);
  }
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // This message has no required fields, so initialization always succeeds;
    // the result is memoized (-1 = unknown, 0 = false, 1 = true).
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Only emit the optional string when its presence bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(
          output, 111196154, sourceInstanceTemplateId_);
    }
    com.google.protobuf.GeneratedMessageV3.serializeStringMapTo(
        output, internalGetUtilizations(), UtilizationsDefaultEntryHolder.defaultEntry, 402495121);
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.GeneratedMessageV3.computeStringSize(
              111196154, sourceInstanceTemplateId_);
    }
    // Each map entry is sized as an individual nested message on the wire.
    for (java.util.Map.Entry<java.lang.String, java.lang.Long> entry :
        internalGetUtilizations().getMap().entrySet()) {
      com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> utilizations__ =
          UtilizationsDefaultEntryHolder.defaultEntry
              .newBuilderForType()
              .setKey(entry.getKey())
              .setValue(entry.getValue())
              .build();
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(402495121, utilizations__);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation other =
        (com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation) obj;
    // Presence must match before values are compared for the optional field.
    if (hasSourceInstanceTemplateId() != other.hasSourceInstanceTemplateId()) return false;
    if (hasSourceInstanceTemplateId()) {
      if (!getSourceInstanceTemplateId().equals(other.getSourceInstanceTemplateId())) return false;
    }
    if (!internalGetUtilizations().equals(other.internalGetUtilizations())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is reserved to mean "not yet computed"; recomputation on a 0 hash is benign.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasSourceInstanceTemplateId()) {
      hash = (37 * hash) + SOURCE_INSTANCE_TEMPLATE_ID_FIELD_NUMBER;
      hash = (53 * hash) + getSourceInstanceTemplateId().hashCode();
    }
    if (!internalGetUtilizations().getMap().isEmpty()) {
      hash = (37 * hash) + UTILIZATIONS_FIELD_NUMBER;
      hash = (53 * hash) + internalGetUtilizations().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh builder; anything else is copied via mergeFrom.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Contains Properties set for the reservation.
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation)
      com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_AllocationResourceStatusSpecificSKUAllocation_descriptor;
    }
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMapFieldReflection(
        int number) {
      switch (number) {
        case 402495121:
          return internalGetUtilizations();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    @SuppressWarnings({"rawtypes"})
    protected com.google.protobuf.MapFieldReflectionAccessor internalGetMutableMapFieldReflection(
        int number) {
      switch (number) {
        case 402495121:
          return internalGetMutableUtilizations();
        default:
          throw new RuntimeException("Invalid map field number: " + number);
      }
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_AllocationResourceStatusSpecificSKUAllocation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation.class,
              com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation.Builder
                  .class);
    }
    // Construct using
    // com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      sourceInstanceTemplateId_ = "";
      internalGetMutableUtilizations().clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_AllocationResourceStatusSpecificSKUAllocation_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
        getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
          .getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation build() {
      com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
        buildPartial() {
      com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation result =
          new com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only the fields whose builder bits are set into the new message.
    private void buildPartial0(
        com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.sourceInstanceTemplateId_ = sourceInstanceTemplateId_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        // Hand the MapField to the message and freeze it; built messages are immutable.
        result.utilizations_ = internalGetUtilizations();
        result.utilizations_.makeImmutable();
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatch to the typed merge when possible; otherwise fall back to reflection.
      if (other
          instanceof com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation) {
        return mergeFrom(
            (com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation other) {
      if (other
          == com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
              .getDefaultInstance()) return this;
      if (other.hasSourceInstanceTemplateId()) {
        sourceInstanceTemplateId_ = other.sourceInstanceTemplateId_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      internalGetMutableUtilizations().mergeFrom(other.internalGetUtilizations());
      bitField0_ |= 0x00000002;
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // 889569234 == (111196154 << 3) | 2: source_instance_template_id,
            // length-delimited string.
            case 889569234:
              {
                sourceInstanceTemplateId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 889569234
            // -1075006326 is (402495121 << 3) | 2 viewed as a signed int:
            // one utilizations map entry.
            case -1075006326:
              {
                com.google.protobuf.MapEntry<java.lang.String, java.lang.Long> utilizations__ =
                    input.readMessage(
                        UtilizationsDefaultEntryHolder.defaultEntry.getParserForType(),
                        extensionRegistry);
                internalGetMutableUtilizations()
                    .getMutableMap()
                    .put(utilizations__.getKey(), utilizations__.getValue());
                bitField0_ |= 0x00000002;
                break;
              } // case -1075006326
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object sourceInstanceTemplateId_ = "";
    /**
     *
     *
     * <pre>
     * ID of the instance template used to populate reservation properties.
     * </pre>
     *
     * <code>optional string source_instance_template_id = 111196154;</code>
     *
     * @return Whether the sourceInstanceTemplateId field is set.
     */
    public boolean hasSourceInstanceTemplateId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * ID of the instance template used to populate reservation properties.
     * </pre>
     *
     * <code>optional string source_instance_template_id = 111196154;</code>
     *
     * @return The sourceInstanceTemplateId.
     */
    public java.lang.String getSourceInstanceTemplateId() {
      java.lang.Object ref = sourceInstanceTemplateId_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and keep the String for later reads.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        sourceInstanceTemplateId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * ID of the instance template used to populate reservation properties.
     * </pre>
     *
     * <code>optional string source_instance_template_id = 111196154;</code>
     *
     * @return The bytes for sourceInstanceTemplateId.
     */
    public com.google.protobuf.ByteString getSourceInstanceTemplateIdBytes() {
      java.lang.Object ref = sourceInstanceTemplateId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        sourceInstanceTemplateId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * ID of the instance template used to populate reservation properties.
     * </pre>
     *
     * <code>optional string source_instance_template_id = 111196154;</code>
     *
     * @param value The sourceInstanceTemplateId to set.
     * @return This builder for chaining.
     */
    public Builder setSourceInstanceTemplateId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      sourceInstanceTemplateId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * ID of the instance template used to populate reservation properties.
     * </pre>
     *
     * <code>optional string source_instance_template_id = 111196154;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSourceInstanceTemplateId() {
      sourceInstanceTemplateId_ = getDefaultInstance().getSourceInstanceTemplateId();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * ID of the instance template used to populate reservation properties.
     * </pre>
     *
     * <code>optional string source_instance_template_id = 111196154;</code>
     *
     * @param value The bytes for sourceInstanceTemplateId to set.
     * @return This builder for chaining.
     */
    public Builder setSourceInstanceTemplateIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      sourceInstanceTemplateId_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long> utilizations_;
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long>
        internalGetUtilizations() {
      if (utilizations_ == null) {
        return com.google.protobuf.MapField.emptyMapField(
            UtilizationsDefaultEntryHolder.defaultEntry);
      }
      return utilizations_;
    }
    private com.google.protobuf.MapField<java.lang.String, java.lang.Long>
        internalGetMutableUtilizations() {
      if (utilizations_ == null) {
        utilizations_ =
            com.google.protobuf.MapField.newMapField(UtilizationsDefaultEntryHolder.defaultEntry);
      }
      // Copy-on-write: a MapField shared with a built message is copied before mutation.
      if (!utilizations_.isMutable()) {
        utilizations_ = utilizations_.copy();
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return utilizations_;
    }
    public int getUtilizationsCount() {
      return internalGetUtilizations().getMap().size();
    }
    /**
     *
     *
     * <pre>
     * Per service utilization breakdown. The Key is the Google Cloud managed service name.
     * </pre>
     *
     * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
     */
    @java.lang.Override
    public boolean containsUtilizations(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      return internalGetUtilizations().getMap().containsKey(key);
    }
    /** Use {@link #getUtilizationsMap()} instead. */
    @java.lang.Override
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.Long> getUtilizations() {
      return getUtilizationsMap();
    }
    /**
     *
     *
     * <pre>
     * Per service utilization breakdown. The Key is the Google Cloud managed service name.
     * </pre>
     *
     * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
     */
    @java.lang.Override
    public java.util.Map<java.lang.String, java.lang.Long> getUtilizationsMap() {
      return internalGetUtilizations().getMap();
    }
    /**
     *
     *
     * <pre>
     * Per service utilization breakdown. The Key is the Google Cloud managed service name.
     * </pre>
     *
     * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
     */
    @java.lang.Override
    public long getUtilizationsOrDefault(java.lang.String key, long defaultValue) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.Long> map = internalGetUtilizations().getMap();
      return map.containsKey(key) ? map.get(key) : defaultValue;
    }
    /**
     *
     *
     * <pre>
     * Per service utilization breakdown. The Key is the Google Cloud managed service name.
     * </pre>
     *
     * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
     */
    @java.lang.Override
    public long getUtilizationsOrThrow(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      java.util.Map<java.lang.String, java.lang.Long> map = internalGetUtilizations().getMap();
      if (!map.containsKey(key)) {
        throw new java.lang.IllegalArgumentException();
      }
      return map.get(key);
    }
    public Builder clearUtilizations() {
      bitField0_ = (bitField0_ & ~0x00000002);
      internalGetMutableUtilizations().getMutableMap().clear();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Per service utilization breakdown. The Key is the Google Cloud managed service name.
     * </pre>
     *
     * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
     */
    public Builder removeUtilizations(java.lang.String key) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableUtilizations().getMutableMap().remove(key);
      return this;
    }
    /** Use alternate mutation accessors instead. */
    @java.lang.Deprecated
    public java.util.Map<java.lang.String, java.lang.Long> getMutableUtilizations() {
      bitField0_ |= 0x00000002;
      return internalGetMutableUtilizations().getMutableMap();
    }
    /**
     *
     *
     * <pre>
     * Per service utilization breakdown. The Key is the Google Cloud managed service name.
     * </pre>
     *
     * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
     */
    public Builder putUtilizations(java.lang.String key, long value) {
      if (key == null) {
        throw new NullPointerException("map key");
      }
      internalGetMutableUtilizations().getMutableMap().put(key, value);
      bitField0_ |= 0x00000002;
      return this;
    }
    /**
     *
     *
     * <pre>
     * Per service utilization breakdown. The Key is the Google Cloud managed service name.
     * </pre>
     *
     * <code>map&lt;string, int64&gt; utilizations = 402495121;</code>
     */
    public Builder putAllUtilizations(java.util.Map<java.lang.String, java.lang.Long> values) {
      internalGetMutableUtilizations().getMutableMap().putAll(values);
      bitField0_ |= 0x00000002;
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation)
  // Singleton default instance shared by all callers.
  private static final com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation();
  }
  public static com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser; on failure the partially built message is attached to the exception.
  private static final com.google.protobuf.Parser<AllocationResourceStatusSpecificSKUAllocation>
      PARSER =
          new com.google.protobuf.AbstractParser<AllocationResourceStatusSpecificSKUAllocation>() {
            @java.lang.Override
            public AllocationResourceStatusSpecificSKUAllocation parsePartialFrom(
                com.google.protobuf.CodedInputStream input,
                com.google.protobuf.ExtensionRegistryLite extensionRegistry)
                throws com.google.protobuf.InvalidProtocolBufferException {
              Builder builder = newBuilder();
              try {
                builder.mergeFrom(input, extensionRegistry);
              } catch (com.google.protobuf.InvalidProtocolBufferException e) {
                throw e.setUnfinishedMessage(builder.buildPartial());
              } catch (com.google.protobuf.UninitializedMessageException e) {
                throw e.asInvalidProtocolBufferException()
                    .setUnfinishedMessage(builder.buildPartial());
              } catch (java.io.IOException e) {
                throw new com.google.protobuf.InvalidProtocolBufferException(e)
                    .setUnfinishedMessage(builder.buildPartial());
              }
              return builder.buildPartial();
            }
          };
  public static com.google.protobuf.Parser<AllocationResourceStatusSpecificSKUAllocation> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<AllocationResourceStatusSpecificSKUAllocation>
      getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.compute.v1.AllocationResourceStatusSpecificSKUAllocation
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 35,793 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/TransformationOverview.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* Overview of the modifications that occurred.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.TransformationOverview}
*/
public final class TransformationOverview extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.TransformationOverview)
TransformationOverviewOrBuilder {
private static final long serialVersionUID = 0L;
// Use TransformationOverview.newBuilder() to construct.
private TransformationOverview(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TransformationOverview() {
transformationSummaries_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TransformationOverview();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_TransformationOverview_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_TransformationOverview_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.TransformationOverview.class,
com.google.privacy.dlp.v2.TransformationOverview.Builder.class);
}
public static final int TRANSFORMED_BYTES_FIELD_NUMBER = 2;
private long transformedBytes_ = 0L;
/**
*
*
* <pre>
* Total size in bytes that were transformed in some way.
* </pre>
*
* <code>int64 transformed_bytes = 2;</code>
*
* @return The transformedBytes.
*/
@java.lang.Override
public long getTransformedBytes() {
return transformedBytes_;
}
public static final int TRANSFORMATION_SUMMARIES_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private java.util.List<com.google.privacy.dlp.v2.TransformationSummary> transformationSummaries_;
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
@java.lang.Override
public java.util.List<com.google.privacy.dlp.v2.TransformationSummary>
getTransformationSummariesList() {
return transformationSummaries_;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
@java.lang.Override
public java.util.List<? extends com.google.privacy.dlp.v2.TransformationSummaryOrBuilder>
getTransformationSummariesOrBuilderList() {
return transformationSummaries_;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
@java.lang.Override
public int getTransformationSummariesCount() {
return transformationSummaries_.size();
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
@java.lang.Override
public com.google.privacy.dlp.v2.TransformationSummary getTransformationSummaries(int index) {
return transformationSummaries_.get(index);
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
@java.lang.Override
public com.google.privacy.dlp.v2.TransformationSummaryOrBuilder
getTransformationSummariesOrBuilder(int index) {
return transformationSummaries_.get(index);
}
  // Tri-state memo: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is unconditionally initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output} in field-number order, following proto3
   * semantics: scalar fields equal to their default (0) are not written on the wire.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (transformedBytes_ != 0L) {
      // Field 2: int64 transformed_bytes.
      output.writeInt64(2, transformedBytes_);
    }
    for (int i = 0; i < transformationSummaries_.size(); i++) {
      // Field 3: repeated TransformationSummary.
      output.writeMessage(3, transformationSummaries_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes in {@code memoizedSize}) the serialized byte size, mirroring the
   * exact write conditions of {@link #writeTo} so size and output always agree.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // already computed; message is immutable so this never changes
    size = 0;
    if (transformedBytes_ != 0L) {
      size += com.google.protobuf.CodedOutputStream.computeInt64Size(2, transformedBytes_);
    }
    for (int i = 0; i < transformationSummaries_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              3, transformationSummaries_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Value equality over all declared fields plus unknown fields. Non-{@code
   * TransformationOverview} arguments fall back to {@code super.equals} (generated-message
   * convention, which handles cross-type message comparison).
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.TransformationOverview)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.TransformationOverview other =
        (com.google.privacy.dlp.v2.TransformationOverview) obj;
    if (getTransformedBytes() != other.getTransformedBytes()) return false;
    if (!getTransformationSummariesList().equals(other.getTransformationSummariesList()))
      return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Memoized hash consistent with {@link #equals}: folds in the descriptor, each set field
   * (keyed by its field number), and unknown fields. Safe to cache because the message is
   * immutable.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TRANSFORMED_BYTES_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getTransformedBytes());
    if (getTransformationSummariesCount() > 0) {
      // Repeated field contributes only when non-empty, matching equals semantics.
      hash = (37 * hash) + TRANSFORMATION_SUMMARIES_FIELD_NUMBER;
      hash = (53 * hash) + getTransformationSummariesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.privacy.dlp.v2.TransformationOverview parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.privacy.dlp.v2.TransformationOverview prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Overview of the modifications that occurred.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.TransformationOverview}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.TransformationOverview)
com.google.privacy.dlp.v2.TransformationOverviewOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_TransformationOverview_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_TransformationOverview_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.privacy.dlp.v2.TransformationOverview.class,
com.google.privacy.dlp.v2.TransformationOverview.Builder.class);
}
// Construct using com.google.privacy.dlp.v2.TransformationOverview.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
    /** Resets all fields to their defaults: transformedBytes to 0, summaries to empty. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      transformedBytes_ = 0L;
      if (transformationSummariesBuilder_ == null) {
        // No nested builder in use: drop back to the shared immutable empty list.
        transformationSummaries_ = java.util.Collections.emptyList();
      } else {
        // Nested builder owns the list; clear via the builder.
        transformationSummaries_ = null;
        transformationSummariesBuilder_.clear();
      }
      // Clear bit 0x00000002 (the "summaries list is locally mutable" flag).
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.privacy.dlp.v2.DlpProto
.internal_static_google_privacy_dlp_v2_TransformationOverview_descriptor;
}
@java.lang.Override
public com.google.privacy.dlp.v2.TransformationOverview getDefaultInstanceForType() {
return com.google.privacy.dlp.v2.TransformationOverview.getDefaultInstance();
}
    /**
     * Builds the message, throwing if required fields are missing. (This message type has no
     * required fields, so {@code isInitialized()} is always true — see the message class.)
     */
    @java.lang.Override
    public com.google.privacy.dlp.v2.TransformationOverview build() {
      com.google.privacy.dlp.v2.TransformationOverview result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Builds without the initialization check; delegates field transfer to the helpers below. */
    @java.lang.Override
    public com.google.privacy.dlp.v2.TransformationOverview buildPartial() {
      com.google.privacy.dlp.v2.TransformationOverview result =
          new com.google.privacy.dlp.v2.TransformationOverview(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated summaries field. If the builder owns a plain list (bit 0x2 set),
    // it is frozen with unmodifiableList and handed to the message; the builder keeps a
    // reference but may no longer mutate it (the cleared bit forces a copy-on-next-write).
    private void buildPartialRepeatedFields(
        com.google.privacy.dlp.v2.TransformationOverview result) {
      if (transformationSummariesBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          transformationSummaries_ =
              java.util.Collections.unmodifiableList(transformationSummaries_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.transformationSummaries_ = transformationSummaries_;
      } else {
        result.transformationSummaries_ = transformationSummariesBuilder_.build();
      }
    }
    // Transfers scalar fields guarded by their presence bits (0x1 = transformedBytes).
    private void buildPartial0(com.google.privacy.dlp.v2.TransformationOverview result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.transformedBytes_ = transformedBytes_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.privacy.dlp.v2.TransformationOverview) {
return mergeFrom((com.google.privacy.dlp.v2.TransformationOverview) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges {@code other} into this builder: non-default scalars overwrite, repeated fields
     * are appended, unknown fields are merged. Merging the default instance is a no-op.
     */
    public Builder mergeFrom(com.google.privacy.dlp.v2.TransformationOverview other) {
      if (other == com.google.privacy.dlp.v2.TransformationOverview.getDefaultInstance())
        return this;
      if (other.getTransformedBytes() != 0L) {
        setTransformedBytes(other.getTransformedBytes());
      }
      if (transformationSummariesBuilder_ == null) {
        // Plain-list mode.
        if (!other.transformationSummaries_.isEmpty()) {
          if (transformationSummaries_.isEmpty()) {
            // Adopt other's (immutable) list directly; clear bit 0x2 so any later
            // mutation copies it first.
            transformationSummaries_ = other.transformationSummaries_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureTransformationSummariesIsMutable();
            transformationSummaries_.addAll(other.transformationSummaries_);
          }
          onChanged();
        }
      } else {
        // Nested-builder mode.
        if (!other.transformationSummaries_.isEmpty()) {
          if (transformationSummariesBuilder_.isEmpty()) {
            // Builder is empty: cheaper to discard it, adopt other's list, and lazily
            // recreate the field builder only if the runtime forces field builders.
            transformationSummariesBuilder_.dispose();
            transformationSummariesBuilder_ = null;
            transformationSummaries_ = other.transformationSummaries_;
            bitField0_ = (bitField0_ & ~0x00000002);
            transformationSummariesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getTransformationSummariesFieldBuilder()
                    : null;
          } else {
            transformationSummariesBuilder_.addAllMessages(other.transformationSummaries_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    /**
     * Parses fields from the wire and merges them into this builder. Recognized tags:
     * 16 (field 2, varint: transformed_bytes) and 26 (field 3, length-delimited:
     * TransformationSummary). Anything else is routed to unknown-field handling; an
     * end-group tag terminates the loop.
     *
     * @throws java.io.IOException on malformed input (InvalidProtocolBufferException is
     *     unwrapped to its IOException cause before rethrowing)
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream.
              done = true;
              break;
            case 16:
              {
                transformedBytes_ = input.readInt64();
                bitField0_ |= 0x00000001; // mark transformed_bytes as explicitly set
                break;
              } // case 16
            case 26:
              {
                com.google.privacy.dlp.v2.TransformationSummary m =
                    input.readMessage(
                        com.google.privacy.dlp.v2.TransformationSummary.parser(),
                        extensionRegistry);
                if (transformationSummariesBuilder_ == null) {
                  ensureTransformationSummariesIsMutable();
                  transformationSummaries_.add(m);
                } else {
                  transformationSummariesBuilder_.addMessage(m);
                }
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure, since fields may have been set.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private long transformedBytes_;
/**
*
*
* <pre>
* Total size in bytes that were transformed in some way.
* </pre>
*
* <code>int64 transformed_bytes = 2;</code>
*
* @return The transformedBytes.
*/
@java.lang.Override
public long getTransformedBytes() {
return transformedBytes_;
}
/**
*
*
* <pre>
* Total size in bytes that were transformed in some way.
* </pre>
*
* <code>int64 transformed_bytes = 2;</code>
*
* @param value The transformedBytes to set.
* @return This builder for chaining.
*/
public Builder setTransformedBytes(long value) {
transformedBytes_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Total size in bytes that were transformed in some way.
* </pre>
*
* <code>int64 transformed_bytes = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearTransformedBytes() {
bitField0_ = (bitField0_ & ~0x00000001);
transformedBytes_ = 0L;
onChanged();
return this;
}
    // Backing list for the repeated summaries field while no field builder is in use.
    // May alias an immutable list (emptyList, or one adopted from another message);
    // bit 0x00000002 of bitField0_ is set only when this builder owns a mutable copy.
    private java.util.List<com.google.privacy.dlp.v2.TransformationSummary>
        transformationSummaries_ = java.util.Collections.emptyList();
    // Copy-on-write guard: replace the (possibly shared/immutable) list with a private
    // ArrayList copy before the first local mutation.
    private void ensureTransformationSummariesIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        transformationSummaries_ =
            new java.util.ArrayList<com.google.privacy.dlp.v2.TransformationSummary>(
                transformationSummaries_);
        bitField0_ |= 0x00000002;
      }
    }
    // Lazily-created nested field builder; once non-null it owns the repeated field and
    // transformationSummaries_ is nulled out (see getTransformationSummariesFieldBuilder).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.TransformationSummary,
            com.google.privacy.dlp.v2.TransformationSummary.Builder,
            com.google.privacy.dlp.v2.TransformationSummaryOrBuilder>
        transformationSummariesBuilder_;
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public java.util.List<com.google.privacy.dlp.v2.TransformationSummary>
getTransformationSummariesList() {
if (transformationSummariesBuilder_ == null) {
return java.util.Collections.unmodifiableList(transformationSummaries_);
} else {
return transformationSummariesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public int getTransformationSummariesCount() {
if (transformationSummariesBuilder_ == null) {
return transformationSummaries_.size();
} else {
return transformationSummariesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public com.google.privacy.dlp.v2.TransformationSummary getTransformationSummaries(int index) {
if (transformationSummariesBuilder_ == null) {
return transformationSummaries_.get(index);
} else {
return transformationSummariesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder setTransformationSummaries(
int index, com.google.privacy.dlp.v2.TransformationSummary value) {
if (transformationSummariesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTransformationSummariesIsMutable();
transformationSummaries_.set(index, value);
onChanged();
} else {
transformationSummariesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder setTransformationSummaries(
int index, com.google.privacy.dlp.v2.TransformationSummary.Builder builderForValue) {
if (transformationSummariesBuilder_ == null) {
ensureTransformationSummariesIsMutable();
transformationSummaries_.set(index, builderForValue.build());
onChanged();
} else {
transformationSummariesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder addTransformationSummaries(
com.google.privacy.dlp.v2.TransformationSummary value) {
if (transformationSummariesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTransformationSummariesIsMutable();
transformationSummaries_.add(value);
onChanged();
} else {
transformationSummariesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder addTransformationSummaries(
int index, com.google.privacy.dlp.v2.TransformationSummary value) {
if (transformationSummariesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTransformationSummariesIsMutable();
transformationSummaries_.add(index, value);
onChanged();
} else {
transformationSummariesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder addTransformationSummaries(
com.google.privacy.dlp.v2.TransformationSummary.Builder builderForValue) {
if (transformationSummariesBuilder_ == null) {
ensureTransformationSummariesIsMutable();
transformationSummaries_.add(builderForValue.build());
onChanged();
} else {
transformationSummariesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder addTransformationSummaries(
int index, com.google.privacy.dlp.v2.TransformationSummary.Builder builderForValue) {
if (transformationSummariesBuilder_ == null) {
ensureTransformationSummariesIsMutable();
transformationSummaries_.add(index, builderForValue.build());
onChanged();
} else {
transformationSummariesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder addAllTransformationSummaries(
java.lang.Iterable<? extends com.google.privacy.dlp.v2.TransformationSummary> values) {
if (transformationSummariesBuilder_ == null) {
ensureTransformationSummariesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, transformationSummaries_);
onChanged();
} else {
transformationSummariesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder clearTransformationSummaries() {
if (transformationSummariesBuilder_ == null) {
transformationSummaries_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
transformationSummariesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public Builder removeTransformationSummaries(int index) {
if (transformationSummariesBuilder_ == null) {
ensureTransformationSummariesIsMutable();
transformationSummaries_.remove(index);
onChanged();
} else {
transformationSummariesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public com.google.privacy.dlp.v2.TransformationSummary.Builder
getTransformationSummariesBuilder(int index) {
return getTransformationSummariesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public com.google.privacy.dlp.v2.TransformationSummaryOrBuilder
getTransformationSummariesOrBuilder(int index) {
if (transformationSummariesBuilder_ == null) {
return transformationSummaries_.get(index);
} else {
return transformationSummariesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public java.util.List<? extends com.google.privacy.dlp.v2.TransformationSummaryOrBuilder>
getTransformationSummariesOrBuilderList() {
if (transformationSummariesBuilder_ != null) {
return transformationSummariesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(transformationSummaries_);
}
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public com.google.privacy.dlp.v2.TransformationSummary.Builder
addTransformationSummariesBuilder() {
return getTransformationSummariesFieldBuilder()
.addBuilder(com.google.privacy.dlp.v2.TransformationSummary.getDefaultInstance());
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public com.google.privacy.dlp.v2.TransformationSummary.Builder
addTransformationSummariesBuilder(int index) {
return getTransformationSummariesFieldBuilder()
.addBuilder(index, com.google.privacy.dlp.v2.TransformationSummary.getDefaultInstance());
}
/**
*
*
* <pre>
* Transformations applied to the dataset.
* </pre>
*
* <code>repeated .google.privacy.dlp.v2.TransformationSummary transformation_summaries = 3;
* </code>
*/
public java.util.List<com.google.privacy.dlp.v2.TransformationSummary.Builder>
getTransformationSummariesBuilderList() {
return getTransformationSummariesFieldBuilder().getBuilderList();
}
    // Lazily creates the RepeatedFieldBuilderV3 for the summaries field, seeded from the
    // current plain list. Ownership of the repeated field transfers to the builder: the
    // local list reference is nulled afterwards, and all accessors switch to builder mode.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.privacy.dlp.v2.TransformationSummary,
            com.google.privacy.dlp.v2.TransformationSummary.Builder,
            com.google.privacy.dlp.v2.TransformationSummaryOrBuilder>
        getTransformationSummariesFieldBuilder() {
      if (transformationSummariesBuilder_ == null) {
        transformationSummariesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.privacy.dlp.v2.TransformationSummary,
                com.google.privacy.dlp.v2.TransformationSummary.Builder,
                com.google.privacy.dlp.v2.TransformationSummaryOrBuilder>(
                transformationSummaries_,
                ((bitField0_ & 0x00000002) != 0), // "isMutable": whether we own the seed list
                getParentForChildren(),
                isClean());
        transformationSummaries_ = null;
      }
      return transformationSummariesBuilder_;
    }
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.TransformationOverview)
}
// @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.TransformationOverview)
private static final com.google.privacy.dlp.v2.TransformationOverview DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.TransformationOverview();
}
public static com.google.privacy.dlp.v2.TransformationOverview getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TransformationOverview> PARSER =
new com.google.protobuf.AbstractParser<TransformationOverview>() {
@java.lang.Override
public TransformationOverview parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TransformationOverview> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TransformationOverview> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.privacy.dlp.v2.TransformationOverview getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,945 | java-monitoring-metricsscope/grpc-google-cloud-monitoring-metricsscope-v1/src/main/java/com/google/monitoring/metricsscope/v1/MetricsScopesGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.monitoring.metricsscope.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Manages Cloud Monitoring Metrics Scopes, and the monitoring of Google Cloud
* projects and AWS accounts.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/monitoring/metricsscope/v1/metrics_scopes.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class MetricsScopesGrpc {

  // Not instantiable: this class only hosts static descriptors, stub factories
  // and server-binding helpers.
  private MetricsScopesGrpc() {}

  public static final java.lang.String SERVICE_NAME =
      "google.monitoring.metricsscope.v1.MetricsScopes";

  // Static method descriptors that strictly reflect the proto.
  // Each descriptor field is volatile and lazily initialized by its accessor
  // via double-checked locking on MetricsScopesGrpc.class.
  private static volatile io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest,
          com.google.monitoring.metricsscope.v1.MetricsScope>
      getGetMetricsScopeMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetMetricsScope",
      requestType = com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest.class,
      responseType = com.google.monitoring.metricsscope.v1.MetricsScope.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest,
          com.google.monitoring.metricsscope.v1.MetricsScope>
      getGetMetricsScopeMethod() {
    io.grpc.MethodDescriptor<
            com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest,
            com.google.monitoring.metricsscope.v1.MetricsScope>
        getGetMetricsScopeMethod;
    if ((getGetMetricsScopeMethod = MetricsScopesGrpc.getGetMetricsScopeMethod) == null) {
      synchronized (MetricsScopesGrpc.class) {
        if ((getGetMetricsScopeMethod = MetricsScopesGrpc.getGetMetricsScopeMethod) == null) {
          MetricsScopesGrpc.getGetMetricsScopeMethod =
              getGetMetricsScopeMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest,
                          com.google.monitoring.metricsscope.v1.MetricsScope>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetMetricsScope"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.monitoring.metricsscope.v1.MetricsScope
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new MetricsScopesMethodDescriptorSupplier("GetMetricsScope"))
                      .build();
        }
      }
    }
    return getGetMetricsScopeMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest,
          com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse>
      getListMetricsScopesByMonitoredProjectMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListMetricsScopesByMonitoredProject",
      requestType =
          com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest.class,
      responseType =
          com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest,
          com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse>
      getListMetricsScopesByMonitoredProjectMethod() {
    io.grpc.MethodDescriptor<
            com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest,
            com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse>
        getListMetricsScopesByMonitoredProjectMethod;
    if ((getListMetricsScopesByMonitoredProjectMethod =
            MetricsScopesGrpc.getListMetricsScopesByMonitoredProjectMethod)
        == null) {
      synchronized (MetricsScopesGrpc.class) {
        if ((getListMetricsScopesByMonitoredProjectMethod =
                MetricsScopesGrpc.getListMetricsScopesByMonitoredProjectMethod)
            == null) {
          MetricsScopesGrpc.getListMetricsScopesByMonitoredProjectMethod =
              getListMetricsScopesByMonitoredProjectMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.monitoring.metricsscope.v1
                              .ListMetricsScopesByMonitoredProjectRequest,
                          com.google.monitoring.metricsscope.v1
                              .ListMetricsScopesByMonitoredProjectResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(
                              SERVICE_NAME, "ListMetricsScopesByMonitoredProject"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.monitoring.metricsscope.v1
                                  .ListMetricsScopesByMonitoredProjectRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.monitoring.metricsscope.v1
                                  .ListMetricsScopesByMonitoredProjectResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new MetricsScopesMethodDescriptorSupplier(
                              "ListMetricsScopesByMonitoredProject"))
                      .build();
        }
      }
    }
    return getListMetricsScopesByMonitoredProjectMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest,
          com.google.longrunning.Operation>
      getCreateMonitoredProjectMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateMonitoredProject",
      requestType = com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest,
          com.google.longrunning.Operation>
      getCreateMonitoredProjectMethod() {
    io.grpc.MethodDescriptor<
            com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest,
            com.google.longrunning.Operation>
        getCreateMonitoredProjectMethod;
    if ((getCreateMonitoredProjectMethod = MetricsScopesGrpc.getCreateMonitoredProjectMethod)
        == null) {
      synchronized (MetricsScopesGrpc.class) {
        if ((getCreateMonitoredProjectMethod = MetricsScopesGrpc.getCreateMonitoredProjectMethod)
            == null) {
          MetricsScopesGrpc.getCreateMonitoredProjectMethod =
              getCreateMonitoredProjectMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "CreateMonitoredProject"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new MetricsScopesMethodDescriptorSupplier("CreateMonitoredProject"))
                      .build();
        }
      }
    }
    return getCreateMonitoredProjectMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest,
          com.google.longrunning.Operation>
      getDeleteMonitoredProjectMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteMonitoredProject",
      requestType = com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest,
          com.google.longrunning.Operation>
      getDeleteMonitoredProjectMethod() {
    io.grpc.MethodDescriptor<
            com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest,
            com.google.longrunning.Operation>
        getDeleteMonitoredProjectMethod;
    if ((getDeleteMonitoredProjectMethod = MetricsScopesGrpc.getDeleteMonitoredProjectMethod)
        == null) {
      synchronized (MetricsScopesGrpc.class) {
        if ((getDeleteMonitoredProjectMethod = MetricsScopesGrpc.getDeleteMonitoredProjectMethod)
            == null) {
          MetricsScopesGrpc.getDeleteMonitoredProjectMethod =
              getDeleteMonitoredProjectMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "DeleteMonitoredProject"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new MetricsScopesMethodDescriptorSupplier("DeleteMonitoredProject"))
                      .build();
        }
      }
    }
    return getDeleteMonitoredProjectMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static MetricsScopesStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<MetricsScopesStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<MetricsScopesStub>() {
          @java.lang.Override
          public MetricsScopesStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new MetricsScopesStub(channel, callOptions);
          }
        };
    return MetricsScopesStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static MetricsScopesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<MetricsScopesBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<MetricsScopesBlockingV2Stub>() {
          @java.lang.Override
          public MetricsScopesBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new MetricsScopesBlockingV2Stub(channel, callOptions);
          }
        };
    return MetricsScopesBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static MetricsScopesBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<MetricsScopesBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<MetricsScopesBlockingStub>() {
          @java.lang.Override
          public MetricsScopesBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new MetricsScopesBlockingStub(channel, callOptions);
          }
        };
    return MetricsScopesBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static MetricsScopesFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<MetricsScopesFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<MetricsScopesFutureStub>() {
          @java.lang.Override
          public MetricsScopesFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new MetricsScopesFutureStub(channel, callOptions);
          }
        };
    return MetricsScopesFutureStub.newStub(factory, channel);
  }

  /**
   *
   *
   * <pre>
   * Manages Cloud Monitoring Metrics Scopes, and the monitoring of Google Cloud
   * projects and AWS accounts.
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Returns a specific `Metrics Scope`.
     * </pre>
     */
    default void getMetricsScope(
        com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest request,
        io.grpc.stub.StreamObserver<com.google.monitoring.metricsscope.v1.MetricsScope>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getGetMetricsScopeMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Returns a list of every `Metrics Scope` that a specific `MonitoredProject`
     * has been added to. The metrics scope representing the specified monitored
     * project will always be the first entry in the response.
     * </pre>
     */
    default void listMetricsScopesByMonitoredProject(
        com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest request,
        io.grpc.stub.StreamObserver<
                com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListMetricsScopesByMonitoredProjectMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Adds a `MonitoredProject` with the given project ID
     * to the specified `Metrics Scope`.
     * </pre>
     */
    default void createMonitoredProject(
        com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateMonitoredProjectMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a `MonitoredProject` from the specified `Metrics Scope`.
     * </pre>
     */
    default void deleteMonitoredProject(
        com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteMonitoredProjectMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service MetricsScopes.
   *
   * <pre>
   * Manages Cloud Monitoring Metrics Scopes, and the monitoring of Google Cloud
   * projects and AWS accounts.
   * </pre>
   */
  public abstract static class MetricsScopesImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return MetricsScopesGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service MetricsScopes.
   *
   * <pre>
   * Manages Cloud Monitoring Metrics Scopes, and the monitoring of Google Cloud
   * projects and AWS accounts.
   * </pre>
   */
  public static final class MetricsScopesStub
      extends io.grpc.stub.AbstractAsyncStub<MetricsScopesStub> {
    private MetricsScopesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected MetricsScopesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new MetricsScopesStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns a specific `Metrics Scope`.
     * </pre>
     */
    public void getMetricsScope(
        com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest request,
        io.grpc.stub.StreamObserver<com.google.monitoring.metricsscope.v1.MetricsScope>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetMetricsScopeMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Returns a list of every `Metrics Scope` that a specific `MonitoredProject`
     * has been added to. The metrics scope representing the specified monitored
     * project will always be the first entry in the response.
     * </pre>
     */
    public void listMetricsScopesByMonitoredProject(
        com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest request,
        io.grpc.stub.StreamObserver<
                com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListMetricsScopesByMonitoredProjectMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Adds a `MonitoredProject` with the given project ID
     * to the specified `Metrics Scope`.
     * </pre>
     */
    public void createMonitoredProject(
        com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateMonitoredProjectMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a `MonitoredProject` from the specified `Metrics Scope`.
     * </pre>
     */
    public void deleteMonitoredProject(
        com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteMonitoredProjectMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service MetricsScopes.
   *
   * <pre>
   * Manages Cloud Monitoring Metrics Scopes, and the monitoring of Google Cloud
   * projects and AWS accounts.
   * </pre>
   */
  public static final class MetricsScopesBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<MetricsScopesBlockingV2Stub> {
    private MetricsScopesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected MetricsScopesBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new MetricsScopesBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns a specific `Metrics Scope`.
     * </pre>
     */
    public com.google.monitoring.metricsscope.v1.MetricsScope getMetricsScope(
        com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetMetricsScopeMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Returns a list of every `Metrics Scope` that a specific `MonitoredProject`
     * has been added to. The metrics scope representing the specified monitored
     * project will always be the first entry in the response.
     * </pre>
     */
    public com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse
        listMetricsScopesByMonitoredProject(
            com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest
                request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListMetricsScopesByMonitoredProjectMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Adds a `MonitoredProject` with the given project ID
     * to the specified `Metrics Scope`.
     * </pre>
     */
    public com.google.longrunning.Operation createMonitoredProject(
        com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateMonitoredProjectMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a `MonitoredProject` from the specified `Metrics Scope`.
     * </pre>
     */
    public com.google.longrunning.Operation deleteMonitoredProject(
        com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteMonitoredProjectMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service MetricsScopes.
   *
   * <pre>
   * Manages Cloud Monitoring Metrics Scopes, and the monitoring of Google Cloud
   * projects and AWS accounts.
   * </pre>
   */
  public static final class MetricsScopesBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<MetricsScopesBlockingStub> {
    private MetricsScopesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected MetricsScopesBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new MetricsScopesBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns a specific `Metrics Scope`.
     * </pre>
     */
    public com.google.monitoring.metricsscope.v1.MetricsScope getMetricsScope(
        com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetMetricsScopeMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Returns a list of every `Metrics Scope` that a specific `MonitoredProject`
     * has been added to. The metrics scope representing the specified monitored
     * project will always be the first entry in the response.
     * </pre>
     */
    public com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse
        listMetricsScopesByMonitoredProject(
            com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest
                request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListMetricsScopesByMonitoredProjectMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Adds a `MonitoredProject` with the given project ID
     * to the specified `Metrics Scope`.
     * </pre>
     */
    public com.google.longrunning.Operation createMonitoredProject(
        com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateMonitoredProjectMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a `MonitoredProject` from the specified `Metrics Scope`.
     * </pre>
     */
    public com.google.longrunning.Operation deleteMonitoredProject(
        com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteMonitoredProjectMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service MetricsScopes.
   *
   * <pre>
   * Manages Cloud Monitoring Metrics Scopes, and the monitoring of Google Cloud
   * projects and AWS accounts.
   * </pre>
   */
  public static final class MetricsScopesFutureStub
      extends io.grpc.stub.AbstractFutureStub<MetricsScopesFutureStub> {
    private MetricsScopesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected MetricsScopesFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new MetricsScopesFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Returns a specific `Metrics Scope`.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.monitoring.metricsscope.v1.MetricsScope>
        getMetricsScope(com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetMetricsScopeMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Returns a list of every `Metrics Scope` that a specific `MonitoredProject`
     * has been added to. The metrics scope representing the specified monitored
     * project will always be the first entry in the response.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectResponse>
        listMetricsScopesByMonitoredProject(
            com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest
                request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListMetricsScopesByMonitoredProjectMethod(), getCallOptions()),
          request);
    }

    /**
     *
     *
     * <pre>
     * Adds a `MonitoredProject` with the given project ID
     * to the specified `Metrics Scope`.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        createMonitoredProject(
            com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateMonitoredProjectMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a `MonitoredProject` from the specified `Metrics Scope`.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        deleteMonitoredProject(
            com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteMonitoredProjectMethod(), getCallOptions()), request);
    }
  }

  // Ordinals used by MethodHandlers to dispatch incoming server calls.
  private static final int METHODID_GET_METRICS_SCOPE = 0;
  private static final int METHODID_LIST_METRICS_SCOPES_BY_MONITORED_PROJECT = 1;
  private static final int METHODID_CREATE_MONITORED_PROJECT = 2;
  private static final int METHODID_DELETE_MONITORED_PROJECT = 3;

  // Dispatches an incoming call to the matching AsyncService method based on
  // the methodId captured at construction time.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_METRICS_SCOPE:
          serviceImpl.getMetricsScope(
              (com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest) request,
              (io.grpc.stub.StreamObserver<com.google.monitoring.metricsscope.v1.MetricsScope>)
                  responseObserver);
          break;
        case METHODID_LIST_METRICS_SCOPES_BY_MONITORED_PROJECT:
          serviceImpl.listMetricsScopesByMonitoredProject(
              (com.google.monitoring.metricsscope.v1.ListMetricsScopesByMonitoredProjectRequest)
                  request,
              (io.grpc.stub.StreamObserver<
                      com.google.monitoring.metricsscope.v1
                          .ListMetricsScopesByMonitoredProjectResponse>)
                  responseObserver);
          break;
        case METHODID_CREATE_MONITORED_PROJECT:
          serviceImpl.createMonitoredProject(
              (com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_DELETE_MONITORED_PROJECT:
          serviceImpl.deleteMonitoredProject(
              (com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // This service declares no client-streaming or bidi methods.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  /** Wires each RPC method of {@code service} into a server service definition. */
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getGetMetricsScopeMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.monitoring.metricsscope.v1.GetMetricsScopeRequest,
                    com.google.monitoring.metricsscope.v1.MetricsScope>(
                    service, METHODID_GET_METRICS_SCOPE)))
        .addMethod(
            getListMetricsScopesByMonitoredProjectMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.monitoring.metricsscope.v1
                        .ListMetricsScopesByMonitoredProjectRequest,
                    com.google.monitoring.metricsscope.v1
                        .ListMetricsScopesByMonitoredProjectResponse>(
                    service, METHODID_LIST_METRICS_SCOPES_BY_MONITORED_PROJECT)))
        .addMethod(
            getCreateMonitoredProjectMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.monitoring.metricsscope.v1.CreateMonitoredProjectRequest,
                    com.google.longrunning.Operation>(service, METHODID_CREATE_MONITORED_PROJECT)))
        .addMethod(
            getDeleteMonitoredProjectMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.monitoring.metricsscope.v1.DeleteMonitoredProjectRequest,
                    com.google.longrunning.Operation>(service, METHODID_DELETE_MONITORED_PROJECT)))
        .build();
  }

  // Supplies proto descriptors so reflection/debug services can describe this service.
  private abstract static class MetricsScopesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    MetricsScopesBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.monitoring.metricsscope.v1.MetricsScopesProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("MetricsScopes");
    }
  }

  private static final class MetricsScopesFileDescriptorSupplier
      extends MetricsScopesBaseDescriptorSupplier {
    MetricsScopesFileDescriptorSupplier() {}
  }

  private static final class MetricsScopesMethodDescriptorSupplier
      extends MetricsScopesBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    MetricsScopesMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  // Service descriptor, lazily built once via double-checked locking.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (MetricsScopesGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new MetricsScopesFileDescriptorSupplier())
                      .addMethod(getGetMetricsScopeMethod())
                      .addMethod(getListMetricsScopesByMonitoredProjectMethod())
                      .addMethod(getCreateMonitoredProjectMethod())
                      .addMethod(getDeleteMonitoredProjectMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
apache/oodt | 35,750 | workflow/src/main/java/org/apache/oodt/cas/workflow/instrepo/LuceneWorkflowInstanceRepository.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.workflow.instrepo;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.LogMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.util.BytesRef;
import org.apache.oodt.cas.metadata.Metadata;
import org.apache.oodt.cas.workflow.lifecycle.WorkflowLifecycleStage;
import org.apache.oodt.cas.workflow.lifecycle.WorkflowState;
import org.apache.oodt.cas.workflow.structs.Priority;
import org.apache.oodt.cas.workflow.structs.Workflow;
import org.apache.oodt.cas.workflow.structs.WorkflowCondition;
import org.apache.oodt.cas.workflow.structs.WorkflowInstance;
import org.apache.oodt.cas.workflow.structs.WorkflowTask;
import org.apache.oodt.cas.workflow.structs.WorkflowTaskConfiguration;
import org.apache.oodt.cas.workflow.structs.exceptions.InstanceRepositoryException;
import org.safehaus.uuid.UUID;
import org.safehaus.uuid.UUIDGenerator;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Vector;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * @author mattmann
 * @version $Revision$
 *
 * <p>
 * An implementation of the {@link WorkflowInstanceRepository} interface that is
 * backed by <a href="http://lucene.apache.org">Apache Lucene</a>.
 * </p>
 */
public class LuceneWorkflowInstanceRepository extends
AbstractPaginatibleInstanceRepository {
    /* Lucene directory backing the workflow instance index */
    Directory indexDir = null;

    /* reader over the index; (re)opened by each query operation */
    private DirectoryReader reader;

    /* merge factor constant for index writes */
    public static final int MERGE_FACTOR = 20;

    /* path to lucene index directory to store wInst info */
    private String idxFilePath = null;

    /* our log stream */
    private static final Logger LOG = Logger
            .getLogger(LuceneWorkflowInstanceRepository.class.getName());

    /* our workflow inst id generator */
    private static UUIDGenerator generator = UUIDGenerator.getInstance();

    // NOTE(review): appears to duplicate MERGE_FACTOR — confirm which is used.
    private int mergeFactor = 20;
/**
*
*/
public LuceneWorkflowInstanceRepository(String idxPath, int pageSize) {
this.idxFilePath = idxPath;
this.pageSize = pageSize;
try {
indexDir = FSDirectory.open(new File( idxFilePath ).toPath());
} catch (IOException e) {
e.printStackTrace();
}
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#getNumWorkflowInstances()
*/
public int getNumWorkflowInstances() throws InstanceRepositoryException {
IndexSearcher searcher = null;
int numInsts = -1;
try {
reader = DirectoryReader.open(indexDir);
} catch (IOException e) {
e.printStackTrace();
}
try {
searcher = new IndexSearcher(reader);
Term instIdTerm = new Term("myfield", "myvalue");
org.apache.lucene.search.Query query = new TermQuery(instIdTerm);
Sort sort = new Sort(new SortField("workflow_inst_startdatetime",
SortField.Type.STRING, true));
TopDocs topDocs = searcher.search(query, 1, sort);
numInsts = topDocs.totalHits;
} catch (IOException e) {
LOG.log(Level.WARNING,
"IOException when opening index directory: [" + idxFilePath
+ "] for search: Message: " + e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (searcher != null) {
try {
//TODO Shutdown searcher
} catch (Exception ignore) {
}
}
}
return numInsts;
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#getNumWorkflowInstancesByStatus(java.lang.String)
*/
public int getNumWorkflowInstancesByStatus(String status)
throws InstanceRepositoryException {
IndexSearcher searcher = null;
int numInsts = -1;
try {
reader = DirectoryReader.open(indexDir);
} catch (IOException e) {
e.printStackTrace();
}
try {
searcher = new IndexSearcher(reader);
Term instIdTerm = new Term("workflow_inst_status", status);
org.apache.lucene.search.Query query = new TermQuery(instIdTerm);
Sort sort = new Sort(new SortField("workflow_inst_startdatetime",
SortField.Type.STRING, true));
TopDocs topDocs = searcher.search(query, 1, sort);
numInsts = topDocs.totalHits;
} catch (IOException e) {
LOG.log(Level.WARNING,
"IOException when opening index directory: [" + idxFilePath
+ "] for search: Message: " + e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (searcher != null) {
try {
//TODO Shutdown searcher
} catch (Exception ignore) {
}
}
}
return numInsts;
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#addWorkflowInstance(org.apache.oodt.cas.workflow.structs.WorkflowInstance)
*/
public synchronized void addWorkflowInstance(WorkflowInstance wInst)
throws InstanceRepositoryException {
// generate UUID for inst
UUID uuid = UUIDGenerator.getInstance().generateTimeBasedUUID();
wInst.setId(uuid.toString());
addWorkflowInstanceToCatalog(wInst);
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#removeWorkflowInstance(org.apache.oodt.cas.workflow.structs.WorkflowInstance)
*/
public synchronized void removeWorkflowInstance(WorkflowInstance wInst)
throws InstanceRepositoryException {
removeWorkflowInstanceDocument(wInst);
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#updateWorkflowInstance(org.apache.oodt.cas.workflow.structs.WorkflowInstance)
*/
public synchronized void updateWorkflowInstance(WorkflowInstance wInst)
throws InstanceRepositoryException {
removeWorkflowInstanceDocument(wInst);
addWorkflowInstanceToCatalog(wInst);
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#getWorkflowInstanceById(java.lang.String)
*/
public WorkflowInstance getWorkflowInstanceById(String workflowInstId)
throws InstanceRepositoryException {
IndexSearcher searcher = null;
WorkflowInstance wInst = null;
try {
reader = DirectoryReader.open(indexDir);
} catch (IOException e) {
e.printStackTrace();
}
try {
searcher = new IndexSearcher(reader);
Term instIdTerm = new Term("workflow_inst_id", workflowInstId);
org.apache.lucene.search.Query query = new TermQuery(instIdTerm);
TopDocs check = searcher.search(query, 1);
if (check.totalHits != 1) {
LOG.log(Level.WARNING, "The workflow instance: ["
+ workflowInstId + "] is not being "
+ "managed by this " + "workflow engine, or "
+ "is not unique in the catalog: num hits: ["+check.totalHits+"]");
return null;
} else {
TopDocs topDocs = searcher.search(query, check.totalHits);
ScoreDoc[] hits = topDocs.scoreDocs;
Document instDoc = searcher.doc(hits[0].doc);
wInst = toWorkflowInstance(instDoc);
}
} catch (IOException e) {
LOG.log(Level.WARNING,
"IOException when opening index directory: [" + idxFilePath
+ "] for search: Message: " + e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (searcher != null) {
try {
//TODO Shutdown searcher
} catch (Exception ignore) {
}
}
}
return wInst;
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#getWorkflowInstances()
*/
public List getWorkflowInstances() throws InstanceRepositoryException {
IndexSearcher searcher = null;
List wInsts = null;
try {
reader = DirectoryReader.open(indexDir);
} catch (IOException e) {
e.printStackTrace();
}
try {
searcher = new IndexSearcher(reader);
Term instIdTerm = new Term("myfield", "myvalue");
org.apache.lucene.search.Query query = new TermQuery(instIdTerm);
Sort sort = new Sort(new SortField("workflow_inst_startdatetime",
SortField.Type.STRING, true));
TopDocs check = searcher.search(query, 1, sort);
if(check.totalHits>0) {
TopDocs topDocs = searcher.search(query, check.totalHits, sort);
ScoreDoc[] hits = topDocs.scoreDocs;
if (topDocs.totalHits > 0) {
wInsts = new Vector(hits.length);
for (ScoreDoc hit : hits) {
Document doc = searcher.doc(hit.doc);
WorkflowInstance wInst = toWorkflowInstance(doc);
wInsts.add(wInst);
}
}
}
} catch (IOException e) {
LOG.log(Level.WARNING,
"IOException when opening index directory: [" + idxFilePath
+ "] for search: Message: " + e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (searcher != null) {
try {
//TODO Shutdown searcher
} catch (Exception ignore) {
}
}
}
return wInsts;
}
@Override
public synchronized boolean clearWorkflowInstances() throws InstanceRepositoryException {
IndexWriter writer = null;
try {
IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
config.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
LogMergePolicy lmp =new LogDocMergePolicy();
lmp.setMergeFactor(mergeFactor);
config.setMergePolicy(lmp);
writer = new IndexWriter(indexDir, config);
LOG.log(Level.FINE,
"LuceneWorkflowEngine: remove all workflow instances");
writer.deleteDocuments(new Term("myfield", "myvalue"));
} catch (IOException e) {
LOG.log(Level.SEVERE, e.getMessage());
LOG
.log(Level.WARNING,
"Exception removing workflow instances from index: Message: "
+ e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (writer != null){
try{
writer.close();
}
catch(Exception ignore){}
writer = null;
}
}
return true;
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.WorkflowInstanceRepository#getWorkflowInstancesByStatus(java.lang.String)
*/
public List getWorkflowInstancesByStatus(String status)
throws InstanceRepositoryException {
IndexSearcher searcher = null;
List wInsts = null;
try {
reader = DirectoryReader.open(indexDir);
} catch (IOException e) {
e.printStackTrace();
}
try {
searcher = new IndexSearcher(reader);
Term instIdTerm = new Term("workflow_inst_status", status);
org.apache.lucene.search.Query query = new TermQuery(instIdTerm);
Sort sort = new Sort(new SortField("workflow_inst_startdatetime",
SortField.Type.STRING, true));
TopDocs check = searcher.search(query, 1, sort);
if(check.totalHits>0) {
TopDocs topDocs = searcher.search(query, check.totalHits, sort);
ScoreDoc[] hits = topDocs.scoreDocs;
if (hits.length > 0) {
wInsts = new Vector(hits.length);
for (ScoreDoc hit : hits) {
Document doc = searcher.doc(hit.doc);
WorkflowInstance wInst = toWorkflowInstance(doc);
wInsts.add(wInst);
}
}
}
} catch (IOException e) {
LOG.log(Level.WARNING,
"IOException when opening index directory: [" + idxFilePath
+ "] for search: Message: " + e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (searcher != null) {
try {
//TODO Shutdown searcher
} catch (Exception ignore) {
}
}
}
return wInsts;
}
/*
* (non-Javadoc)
*
* @see org.apache.oodt.cas.workflow.instrepo.AbstractPaginatibleInstanceRepository#paginateWorkflows(int,
* java.lang.String)
*/
protected List paginateWorkflows(int pageNum, String status)
throws InstanceRepositoryException {
List instIds = null;
IndexSearcher searcher = null;
try {
reader = DirectoryReader.open(indexDir);
} catch (IOException e) {
e.printStackTrace();
}
try {
searcher = new IndexSearcher(reader);
// construct a Boolean query here
BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
Term instIdTerm = new Term("myfield", "myvalue");
if (status != null) {
Term statusTerm = new Term("workflow_inst_status", status);
booleanQuery.add(new TermQuery(statusTerm),
BooleanClause.Occur.MUST);
}
booleanQuery.add(new TermQuery(instIdTerm),
BooleanClause.Occur.MUST);
Sort sort = new Sort(new SortField("workflow_inst_startdatetime",
SortField.Type.STRING, true));
LOG.log(Level.FINE,
"Querying LuceneWorkflowInstanceRepository: q: ["
+ booleanQuery + "]");
TopDocs check = searcher.search(booleanQuery.build(), 1, sort);
if(check.totalHits>0) {
TopDocs topDocs = searcher.search(booleanQuery.build(), check.totalHits, sort);
ScoreDoc[] hits = topDocs.scoreDocs;
if (hits.length > 0) {
int startNum = (pageNum - 1) * pageSize;
if (startNum > hits.length) {
startNum = 0;
}
instIds = new Vector(pageSize);
for (int i = startNum; i < Math.min(hits.length,
(startNum + pageSize)); i++) {
Document instDoc = searcher.doc(hits[i].doc);
WorkflowInstance inst = toWorkflowInstance(instDoc);
instIds.add(inst.getId());
}
} else {
LOG.log(Level.WARNING, "No workflow instances found "
+ "when attempting to paginate!");
}
}
} catch (IOException e) {
LOG.log(Level.WARNING,
"IOException when opening index directory: [" + idxFilePath
+ "] for search: Message: " + e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (searcher != null) {
try {
//TODO Shutdown searcher
} catch (Exception ignore) {
}
}
}
return instIds;
}
private synchronized void removeWorkflowInstanceDocument(
WorkflowInstance inst) throws InstanceRepositoryException {
IndexReader reader = null;
try {
reader = DirectoryReader.open(indexDir);
} catch (IOException e) {
e.printStackTrace();
}
try {
reader = DirectoryReader.open(indexDir);
IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
config.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
LogMergePolicy lmp =new LogDocMergePolicy();
lmp.setMergeFactor(mergeFactor);
config.setMergePolicy(lmp);
IndexWriter writer = new IndexWriter(indexDir, config);
LOG.log(Level.FINE,
"LuceneWorkflowEngine: remove document from index for workflow instance: ["
+ inst.getId() + "]");
writer.deleteDocuments(new Term("workflow_inst_id", inst.getId()));
writer.close();
} catch (IOException e) {
LOG.log(Level.SEVERE, e.getMessage());
LOG
.log(Level.WARNING,
"Exception removing workflow instance: ["
+ inst.getId() + "] from index: Message: "
+ e.getMessage());
throw new InstanceRepositoryException(e.getMessage());
} finally {
if (reader != null) {
try {
reader.close();
} catch (Exception ignore) {
}
}
}
}
private synchronized void addWorkflowInstanceToCatalog(
WorkflowInstance wInst) throws InstanceRepositoryException {
IndexWriter writer = null;
try {
IndexWriterConfig config = new IndexWriterConfig(new StandardAnalyzer());
config.setOpenMode(IndexWriterConfig.OpenMode.CREATE_OR_APPEND);
LogMergePolicy lmp =new LogDocMergePolicy();
lmp.setMergeFactor(mergeFactor);
config.setMergePolicy(lmp);
writer = new IndexWriter(indexDir, config);
Document doc = toDoc(wInst);
writer.addDocument(doc);
} catch (IOException e) {
LOG.log(Level.WARNING, "Unable to index workflow instance: ["
+ wInst.getId() + "]: Message: " + e.getMessage());
throw new InstanceRepositoryException(
"Unable to index workflow instance: [" + wInst.getId()
+ "]: Message: " + e.getMessage());
} finally {
try {
writer.close();
} catch (Exception e) {
System.out.println(e);
}
}
}
private Document toDoc(WorkflowInstance workflowInst) {
Document doc = new Document();
// store the workflow instance info first
doc.add(new Field("workflow_inst_id", workflowInst.getId(),
StringField.TYPE_STORED));
doc.add(new Field("workflow_inst_timesblocked",
String.valueOf(workflowInst.getTimesBlocked()), StringField.TYPE_STORED));
// will leave this for back compat, but will also store
// category
doc.add(new Field("workflow_inst_status", workflowInst.getStatus(),
StringField.TYPE_STORED));
if(workflowInst.getState() != null){
WorkflowState state = workflowInst.getState();
if(state.getDescription() != null){
doc.add(new Field("workflow_inst_state_desc",
state.getDescription(), StringField.TYPE_STORED));
}
if(state.getMessage() != null){
doc.add(new Field("workflow_inst_state_message",
state.getMessage(), StringField.TYPE_STORED));
}
if(state.getCategory() != null && state.getCategory().getName() != null){
doc.add(new Field("workflow_inst_state_category",
state.getCategory().getName(), StringField.TYPE_STORED));
}
}
doc
.add(new Field("workflow_inst_current_task_id", workflowInst
.getCurrentTaskId(), StringField.TYPE_STORED));
doc
.add(new Field(
"workflow_inst_currenttask_startdatetime",
workflowInst.getCurrentTaskStartDateTimeIsoStr() != null ? workflowInst
.getCurrentTaskStartDateTimeIsoStr()
: "", StringField.TYPE_STORED));
doc.add(new SortedDocValuesField("workflow_inst_currenttask_startdatetime", new BytesRef(workflowInst.getCurrentTaskStartDateTimeIsoStr() != null ? workflowInst
.getCurrentTaskStartDateTimeIsoStr()
: "")));
doc.add(new Field("workflow_inst_currenttask_enddatetime", workflowInst
.getCurrentTaskEndDateTimeIsoStr() != null ? workflowInst
.getCurrentTaskEndDateTimeIsoStr() : "", StringField.TYPE_STORED));
doc.add(new SortedDocValuesField("workflow_inst_currenttask_enddatetime", new BytesRef(workflowInst
.getCurrentTaskEndDateTimeIsoStr() != null ? workflowInst
.getCurrentTaskEndDateTimeIsoStr() : "")));
doc.add(new Field("workflow_inst_startdatetime", workflowInst
.getStartDateTimeIsoStr() != null ? workflowInst
.getStartDateTimeIsoStr() : "", StringField.TYPE_STORED));
doc.add(new SortedDocValuesField("workflow_inst_startdatetime", new BytesRef(workflowInst
.getStartDateTimeIsoStr() != null ? workflowInst
.getStartDateTimeIsoStr() : "")));
doc.add(new Field("workflow_inst_enddatetime", workflowInst
.getEndDateTimeIsoStr() != null ? workflowInst
.getEndDateTimeIsoStr() : "", StringField.TYPE_STORED));
doc.add(new SortedDocValuesField("workflow_inst_enddatetime", new BytesRef(workflowInst
.getEndDateTimeIsoStr() != null ? workflowInst
.getEndDateTimeIsoStr() : "")));
doc.add(new Field("workflow_inst_priority",
workflowInst.getPriority() != null ?
String.valueOf(workflowInst.getPriority().getValue()):
String.valueOf(Priority.getDefault().getValue()),
StringField.TYPE_STORED));
// add all metadata
addInstanceMetadataToDoc(doc, workflowInst.getSharedContext());
// store the workflow info too
doc.add(new Field("workflow_id", workflowInst.getWorkflow().getId(),
StringField.TYPE_STORED));
doc.add(new Field("workflow_name",
workflowInst.getWorkflow().getName(), StringField.TYPE_STORED));
// store the tasks
addTasksToDoc(doc, workflowInst.getWorkflow().getTasks());
// store workflow conditions
addConditionsToDoc("workflow_condition_"+workflowInst.getWorkflow().getId(),
workflowInst.getWorkflow().getConditions()
, doc);
// add the default field (so that we can do a query for *)
doc.add(new Field("myfield", "myvalue", StringField.TYPE_STORED));
return doc;
}
private void addInstanceMetadataToDoc(Document doc, Metadata met) {
if (met != null && met.getMap().keySet().size() > 0) {
for (String metKey : met.getMap().keySet()) {
List metVals = met.getAllMetadata(metKey);
if (metVals != null && metVals.size() > 0) {
for (Object metVal1 : metVals) {
String metVal = (String) metVal1;
doc.add(new Field(metKey, metVal, StringField.TYPE_STORED));
}
// now index the field name so that we can use it to
// look it up when converting from doc to
// WorkflowInstance
doc.add(new Field("workflow_inst_met_flds", metKey,
StringField.TYPE_STORED));
}
}
}
}
private void addTasksToDoc(Document doc, List tasks) {
if (tasks != null && tasks.size() > 0) {
for (Object task1 : tasks) {
WorkflowTask task = (WorkflowTask) task1;
doc.add(new Field("task_id", task.getTaskId(), StringField.TYPE_STORED));
doc.add(new Field("task_name", task.getTaskName(),
StringField.TYPE_STORED));
doc.add(new Field("task_order",
String.valueOf(task.getOrder()), StringField.TYPE_STORED));
doc.add(new Field("task_class",
task.getTaskInstanceClassName(), StringField.TYPE_STORED));
addConditionsToDoc(task.getTaskId(), task.getConditions(), doc);
addTaskConfigToDoc(task.getTaskId(), task.getTaskConfig(), doc);
}
}
}
private void addTaskConfigToDoc(String taskId,
WorkflowTaskConfiguration config, Document doc) {
if (config != null) {
for (Object o : config.getProperties().keySet()) {
String propName = (String) o;
String propValue = config.getProperty(propName);
doc.add(new Field(taskId + "_config_property_name", propName,
StringField.TYPE_STORED));
doc.add(new Field(taskId + "_config_property_value", propValue,
StringField.TYPE_STORED));
}
}
}
private void addConditionsToDoc(String taskId, List conditionList,
Document doc) {
if (conditionList != null && conditionList.size() > 0) {
for (Object aConditionList : conditionList) {
WorkflowCondition cond = (WorkflowCondition) aConditionList;
doc.add(new Field(taskId + "_condition_name", cond.getConditionName(),
StringField.TYPE_STORED));
doc.add(new Field(taskId + "_condition_id", cond.getConditionId(),
StringField.TYPE_STORED));
doc.add(new Field(taskId + "_condition_class", cond
.getConditionInstanceClassName(),StringField.TYPE_STORED));
doc.add(new Field(taskId + "_condition_order", String.valueOf(cond
.getOrder()), StringField.TYPE_STORED));
doc.add(new Field(taskId + "_condition_timeout", String.valueOf(cond
.getTimeoutSeconds()), StringField.TYPE_STORED));
doc.add(new Field(taskId + "_condition_optional", String.valueOf(cond.isOptional()),
StringField.TYPE_STORED));
}
}
}
private WorkflowInstance toWorkflowInstance(Document doc) {
WorkflowInstance inst = new WorkflowInstance();
// first read all the instance info
inst.setId(doc.get("workflow_inst_id"));
inst.setTimesBlocked(Integer.parseInt(doc.get("workflow_inst_timesblocked") !=
null ? doc.get("workflow_inst_timesblocked"):"0"));
// try and construct a state
WorkflowState state = new WorkflowState();
state.setName(doc.get("workflow_inst_status"));
if(doc.get("workflow_inst_state_category") != null){
WorkflowLifecycleStage category = new WorkflowLifecycleStage();
category.setName(doc.get("workflow_inst_state_category"));
state.setCategory(category);
}
if(doc.get("workflow_inst_state_desc") != null){
state.setDescription(doc.get("workflow_inst_state_desc"));
}
if(doc.get("workflow_inst_state_message") != null){
state.setMessage(doc.get("workflow_inst_state_message"));
}
inst.setState(state);
inst.setCurrentTaskId(doc.get("workflow_inst_current_task_id"));
inst.setCurrentTaskStartDateTimeIsoStr(doc
.get("workflow_inst_currenttask_startdatetime"));
inst.setCurrentTaskEndDateTimeIsoStr(doc
.get("workflow_inst_currenttask_enddatetime"));
inst.setStartDateTimeIsoStr(doc.get("workflow_inst_startdatetime"));
inst.setEndDateTimeIsoStr(doc.get("workflow_inst_enddatetime"));
inst.setPriority(Priority.getPriority(doc.get("workflow_inst_priority") != null ?
Double.valueOf(doc.get("workflow_inst_priority")):Priority.getDefault().getValue()));
// read the workflow instance metadata
Metadata sharedContext = new Metadata();
String[] instMetFields = doc.getValues("workflow_inst_met_flds");
if (instMetFields != null && instMetFields.length > 0) {
for (String fldName : instMetFields) {
String[] vals = doc.getValues(fldName);
if (vals != null && vals.length > 0) {
for (String val : vals) {
sharedContext.addMetadata(fldName, val);
}
}
}
}
inst.setSharedContext(sharedContext);
// now read all of the workflow info
Workflow workflow = new Workflow();
workflow.setId(doc.get("workflow_id"));
workflow.setName(doc.get("workflow_name"));
workflow.setTasks(toTasks(doc));
workflow.setConditions(toConditions("workflow_condition_"+workflow.getId(), doc));
inst.setWorkflow(workflow);
return inst;
}
private List toTasks(Document doc) {
List taskList = new Vector();
String[] taskIds = doc.getValues("task_id");
String[] taskNames = doc.getValues("task_name");
String[] taskOrders = doc.getValues("task_order");
String[] taskClasses = doc.getValues("task_class");
if (taskIds.length != taskNames.length
|| taskIds.length != taskOrders.length
|| taskIds.length != taskClasses.length) {
LOG.log(Level.WARNING,
"task arrays are not of same size when rebuilding "
+ "task list from Document!");
return null;
}
for (int i = 0; i < taskIds.length; i++) {
WorkflowTask task = new WorkflowTask();
task.setOrder(Integer.parseInt(taskOrders[i]));
task.setTaskName(taskNames[i]);
task.setTaskId(taskIds[i]);
task.setTaskInstanceClassName(taskClasses[i]);
task.setConditions(toConditions(task.getTaskId(), doc));
task.setTaskConfig(toTaskConfig(task.getTaskId(), doc));
taskList.add(task);
}
return taskList;
}
private WorkflowTaskConfiguration toTaskConfig(String taskId, Document doc) {
WorkflowTaskConfiguration taskConfig = new WorkflowTaskConfiguration();
String[] propNames = doc.getValues(taskId + "_config_property_name");
String[] propValues = doc.getValues(taskId + "_config_property_value");
if (propNames == null) {
return taskConfig;
}
if (propNames.length != propValues.length) {
LOG.log(Level.WARNING,
"Task Config prop name and value arrays are not "
+ "of same size!");
return null;
}
for (int i = 0; i < propNames.length; i++) {
taskConfig.addConfigProperty(propNames[i], propValues[i]);
}
return taskConfig;
}
private List toConditions(String taskId, Document doc) {
List condList = new Vector();
String[] condNames = doc.getValues(taskId + "_condition_name");
String[] condClasses = doc.getValues(taskId + "_condition_class");
String[] condOrders = doc.getValues(taskId + "_condition_order");
String[] condIds = doc.getValues(taskId + "_condition_id");
String[] condTimeouts = doc.getValues(taskId+"_condition_timeout");
String[] condOptionals = doc.getValues(taskId+"_condition_optional");
if (condNames == null) {
return condList;
}
if (condNames.length != condClasses.length
|| condNames.length != condOrders.length
|| condNames.length != condIds.length
|| (condTimeouts != null && condNames.length != condTimeouts.length)
|| (condOptionals != null && condNames.length != condOptionals.length)) {
LOG.log(Level.WARNING,
"Condition arrays are not of same size when "
+ "rebuilding from given Document");
return null;
}
for (int i = 0; i < condNames.length; i++) {
WorkflowCondition cond = new WorkflowCondition();
cond.setConditionId(condIds[i]);
cond.setConditionInstanceClassName(condClasses[i]);
cond.setConditionName(condNames[i]);
cond.setOrder(Integer.parseInt(condOrders[i]));
if(condTimeouts != null){
cond.setTimeoutSeconds(Long.parseLong(condTimeouts[i]));
}
if(condOptionals != null){
cond.setOptional(Boolean.valueOf(condOptionals[i]));
}
condList.add(cond);
}
return condList;
}
}
|
googleapis/google-cloud-java | 35,692 | java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetIamPolicyNodeTemplateRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.compute.v1;
/**
*
*
* <pre>
* A request message for NodeTemplates.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest}
*/
public final class GetIamPolicyNodeTemplateRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest)
GetIamPolicyNodeTemplateRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use GetIamPolicyNodeTemplateRequest.newBuilder() to construct.
private GetIamPolicyNodeTemplateRequest(
com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GetIamPolicyNodeTemplateRequest() {
project_ = "";
region_ = "";
resource_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GetIamPolicyNodeTemplateRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyNodeTemplateRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.compute.v1.Compute
.internal_static_google_cloud_compute_v1_GetIamPolicyNodeTemplateRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.class,
com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.Builder.class);
}
private int bitField0_;
public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029;
private int optionsRequestedPolicyVersion_ = 0;
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return Whether the optionsRequestedPolicyVersion field is set.
*/
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Requested IAM Policy version.
* </pre>
*
* <code>optional int32 options_requested_policy_version = 499220029;</code>
*
* @return The optionsRequestedPolicyVersion.
*/
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
return optionsRequestedPolicyVersion_;
}
public static final int PROJECT_FIELD_NUMBER = 227560217;
@SuppressWarnings("serial")
private volatile java.lang.Object project_ = "";
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The project.
*/
@java.lang.Override
public java.lang.String getProject() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
project_ = s;
return s;
}
}
/**
*
*
* <pre>
* Project ID for this request.
* </pre>
*
* <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for project.
*/
@java.lang.Override
public com.google.protobuf.ByteString getProjectBytes() {
java.lang.Object ref = project_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
project_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int REGION_FIELD_NUMBER = 138946292;
@SuppressWarnings("serial")
private volatile java.lang.Object region_ = "";
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The region.
*/
@java.lang.Override
public java.lang.String getRegion() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
region_ = s;
return s;
}
}
/**
*
*
* <pre>
* The name of the region for this request.
* </pre>
*
* <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for region.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRegionBytes() {
java.lang.Object ref = region_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
region_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RESOURCE_FIELD_NUMBER = 195806222;
@SuppressWarnings("serial")
private volatile java.lang.Object resource_ = "";
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The resource.
*/
@java.lang.Override
public java.lang.String getResource() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
resource_ = s;
return s;
}
}
/**
*
*
* <pre>
* Name or id of the resource for this request.
* </pre>
*
* <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for resource.
*/
@java.lang.Override
public com.google.protobuf.ByteString getResourceBytes() {
java.lang.Object ref = resource_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
resource_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeInt32(499220029, optionsRequestedPolicyVersion_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the computed size; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_);
  }
  // Only counted when the optional field is explicitly set.
  if (((bitField0_ & 0x00000001) != 0)) {
    size +=
        com.google.protobuf.CodedOutputStream.computeInt32Size(
            499220029, optionsRequestedPolicyVersion_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest other =
      (com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest) obj;

  // The optional field must agree both on presence and, when present, value.
  if (hasOptionsRequestedPolicyVersion() != other.hasOptionsRequestedPolicyVersion())
    return false;
  if (hasOptionsRequestedPolicyVersion()) {
    if (getOptionsRequestedPolicyVersion() != other.getOptionsRequestedPolicyVersion())
      return false;
  }
  if (!getProject().equals(other.getProject())) return false;
  if (!getRegion().equals(other.getRegion())) return false;
  if (!getResource().equals(other.getResource())) return false;
  // Unknown fields participate in equality, matching generated-code contract.
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // 0 doubles as the "not yet computed" sentinel for the memoized hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // 37/53 multipliers follow the standard protobuf generated-code scheme;
  // the optional field contributes only when present, mirroring equals().
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasOptionsRequestedPolicyVersion()) {
    hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER;
    hash = (53 * hash) + getOptionsRequestedPolicyVersion();
  }
  hash = (37 * hash) + PROJECT_FIELD_NUMBER;
  hash = (53 * hash) + getProject().hashCode();
  hash = (37 * hash) + REGION_FIELD_NUMBER;
  hash = (53 * hash) + getRegion().hashCode();
  hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
  hash = (53 * hash) + getResource().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parseFrom/parseDelimitedFrom overloads. All byte-based
// variants delegate to PARSER; stream-based variants go through
// GeneratedMessageV3 helpers so IOExceptions surface unchanged.
public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a leading varint length prefix before the message.
public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

// Fresh builder seeded from the (immutable) default instance.
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

// Builder pre-populated with the state of an existing message.
public static Builder newBuilder(
    com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  // For the default instance an empty builder suffices; otherwise copy state.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* A request message for NodeTemplates.GetIamPolicy. See the method description for details.
* </pre>
*
* Protobuf type {@code google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest)
com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequestOrBuilder {
// Descriptor for google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.compute.v1.Compute
      .internal_static_google_cloud_compute_v1_GetIamPolicyNodeTemplateRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.compute.v1.Compute
      .internal_static_google_cloud_compute_v1_GetIamPolicyNodeTemplateRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.class,
          com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.Builder.class);
}

// Construct using com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}

@java.lang.Override
public Builder clear() {
  super.clear();
  // Reset presence bits and every field to its proto3 default.
  bitField0_ = 0;
  optionsRequestedPolicyVersion_ = 0;
  project_ = "";
  region_ = "";
  resource_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return com.google.cloud.compute.v1.Compute
      .internal_static_google_cloud_compute_v1_GetIamPolicyNodeTemplateRequest_descriptor;
}

@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest getDefaultInstanceForType() {
  return com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.getDefaultInstance();
}

@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest build() {
  com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest buildPartial() {
  com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest result =
      new com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest(this);
  if (bitField0_ != 0) {
    buildPartial0(result);
  }
  onBuilt();
  return result;
}

// Copies set fields from the builder into the message. Builder bit layout:
// 0x1 = optionsRequestedPolicyVersion, 0x2 = project, 0x4 = region,
// 0x8 = resource. Only bit 0x1 (the optional int32) carries over into the
// message's own bitField0_, since strings track presence by non-emptiness.
private void buildPartial0(com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) {
    result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_;
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.project_ = project_;
  }
  if (((from_bitField0_ & 0x00000004) != 0)) {
    result.region_ = region_;
  }
  if (((from_bitField0_ & 0x00000008) != 0)) {
    result.resource_ = resource_;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}

@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Fast path for same-type merge; otherwise fall back to reflective merge.
  if (other instanceof com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest) {
    return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Merge semantics: set/non-default fields of 'other' overwrite this builder.
public Builder mergeFrom(com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest other) {
  if (other == com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest.getDefaultInstance())
    return this;
  if (other.hasOptionsRequestedPolicyVersion()) {
    setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion());
  }
  if (!other.getProject().isEmpty()) {
    project_ = other.project_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  if (!other.getRegion().isEmpty()) {
    region_ = other.region_;
    bitField0_ |= 0x00000004;
    onChanged();
  }
  if (!other.getResource().isEmpty()) {
    resource_ = other.resource_;
    bitField0_ |= 0x00000008;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

@java.lang.Override
public final boolean isInitialized() {
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      // Each case value is the full wire tag: (field_number << 3) | wire_type.
      // E.g. 1111570338 == (138946292 << 3) | 2 (region, length-delimited);
      // -301207064 is (499220029 << 3) | 0 wrapped around as a signed int.
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 1111570338:
          {
            region_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000004;
            break;
          } // case 1111570338
        case 1566449778:
          {
            resource_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000008;
            break;
          } // case 1566449778
        case 1820481738:
          {
            project_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 1820481738
        case -301207064:
          {
            optionsRequestedPolicyVersion_ = input.readInt32();
            bitField0_ |= 0x00000001;
            break;
          } // case -301207064
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially-merged state is observed.
    onChanged();
  } // finally
  return this;
}
// Presence bits: 0x1 = optionsRequestedPolicyVersion, 0x2 = project,
// 0x4 = region, 0x8 = resource.
private int bitField0_;

private int optionsRequestedPolicyVersion_;
/**
 * <pre>
 * Requested IAM Policy version.
 * </pre>
 *
 * <code>optional int32 options_requested_policy_version = 499220029;</code>
 *
 * @return Whether the optionsRequestedPolicyVersion field is set.
 */
@java.lang.Override
public boolean hasOptionsRequestedPolicyVersion() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * Requested IAM Policy version.
 * </pre>
 *
 * <code>optional int32 options_requested_policy_version = 499220029;</code>
 *
 * @return The optionsRequestedPolicyVersion.
 */
@java.lang.Override
public int getOptionsRequestedPolicyVersion() {
  return optionsRequestedPolicyVersion_;
}
/**
 * <pre>
 * Requested IAM Policy version.
 * </pre>
 *
 * <code>optional int32 options_requested_policy_version = 499220029;</code>
 *
 * @param value The optionsRequestedPolicyVersion to set.
 * @return This builder for chaining.
 */
public Builder setOptionsRequestedPolicyVersion(int value) {
  optionsRequestedPolicyVersion_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * <pre>
 * Requested IAM Policy version.
 * </pre>
 *
 * <code>optional int32 options_requested_policy_version = 499220029;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearOptionsRequestedPolicyVersion() {
  bitField0_ = (bitField0_ & ~0x00000001);
  optionsRequestedPolicyVersion_ = 0;
  onChanged();
  return this;
}
// Stored as Object: either a decoded String or the wire-format ByteString;
// the accessors lazily convert and cache in whichever form is requested.
private java.lang.Object project_ = "";
/**
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The project.
 */
public java.lang.String getProject() {
  java.lang.Object ref = project_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    project_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for project.
 */
public com.google.protobuf.ByteString getProjectBytes() {
  java.lang.Object ref = project_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    project_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The project to set.
 * @return This builder for chaining.
 */
public Builder setProject(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  project_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearProject() {
  project_ = getDefaultInstance().getProject();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 * <pre>
 * Project ID for this request.
 * </pre>
 *
 * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for project to set.
 * @return This builder for chaining.
 */
public Builder setProjectBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8 up front (proto3 string contract).
  checkByteStringIsUtf8(value);
  project_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Same String/ByteString dual representation as project_ above.
private java.lang.Object region_ = "";
/**
 * <pre>
 * The name of the region for this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The region.
 */
public java.lang.String getRegion() {
  java.lang.Object ref = region_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    region_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * The name of the region for this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for region.
 */
public com.google.protobuf.ByteString getRegionBytes() {
  java.lang.Object ref = region_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    region_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * The name of the region for this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The region to set.
 * @return This builder for chaining.
 */
public Builder setRegion(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  region_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * <pre>
 * The name of the region for this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearRegion() {
  region_ = getDefaultInstance().getRegion();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
 * <pre>
 * The name of the region for this request.
 * </pre>
 *
 * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for region to set.
 * @return This builder for chaining.
 */
public Builder setRegionBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8 up front (proto3 string contract).
  checkByteStringIsUtf8(value);
  region_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Same String/ByteString dual representation as project_ above.
private java.lang.Object resource_ = "";
/**
 * <pre>
 * Name or id of the resource for this request.
 * </pre>
 *
 * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The resource.
 */
public java.lang.String getResource() {
  java.lang.Object ref = resource_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    resource_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * <pre>
 * Name or id of the resource for this request.
 * </pre>
 *
 * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return The bytes for resource.
 */
public com.google.protobuf.ByteString getResourceBytes() {
  java.lang.Object ref = resource_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    resource_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * <pre>
 * Name or id of the resource for this request.
 * </pre>
 *
 * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The resource to set.
 * @return This builder for chaining.
 */
public Builder setResource(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  resource_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * <pre>
 * Name or id of the resource for this request.
 * </pre>
 *
 * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @return This builder for chaining.
 */
public Builder clearResource() {
  resource_ = getDefaultInstance().getResource();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}
/**
 * <pre>
 * Name or id of the resource for this request.
 * </pre>
 *
 * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code>
 *
 * @param value The bytes for resource to set.
 * @return This builder for chaining.
 */
public Builder setResourceBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // Reject bytes that are not valid UTF-8 up front (proto3 string contract).
  checkByteStringIsUtf8(value);
  resource_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
// Final overrides: unknown-field handling is delegated unchanged to the base
// builder; declared here only to narrow the return type to Builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest)
// Shared immutable default instance; all empty messages alias this object.
private static final com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest();
}

public static com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless singleton parser; parsing is routed through a fresh Builder so
// that partial input still yields an "unfinished message" for diagnostics.
private static final com.google.protobuf.Parser<GetIamPolicyNodeTemplateRequest> PARSER =
    new com.google.protobuf.AbstractParser<GetIamPolicyNodeTemplateRequest>() {
      @java.lang.Override
      public GetIamPolicyNodeTemplateRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far to the thrown exception.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<GetIamPolicyNodeTemplateRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<GetIamPolicyNodeTemplateRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.compute.v1.GetIamPolicyNodeTemplateRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
oracle/nosql | 36,011 | kvmain/src/main/java/oracle/kv/impl/api/table/IndexScan.java | /*-
* Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
*
* This file was distributed by Oracle as part of a version of Oracle NoSQL
* Database made available at:
*
* http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
*
* Please see the LICENSE file included in the top-level directory of the
* appropriate version of Oracle NoSQL Database for a copy of the license and
* additional information.
*/
package oracle.kv.impl.api.table;
import static oracle.kv.impl.async.FutureUtils.failedFuture;
import static oracle.kv.impl.async.FutureUtils.unwrapExceptionVoid;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.BiConsumer;
import oracle.kv.Consistency;
import oracle.kv.Direction;
import oracle.kv.Key;
import oracle.kv.ValueVersion;
import oracle.kv.impl.api.KVStoreImpl;
import oracle.kv.impl.api.Request;
import oracle.kv.impl.api.ops.IndexIterate;
import oracle.kv.impl.api.ops.IndexKeysIterate;
import oracle.kv.impl.api.ops.InternalOperation;
import oracle.kv.impl.api.ops.Result;
import oracle.kv.impl.api.ops.ResultIndexKeys;
import oracle.kv.impl.api.ops.ResultIndexRows;
import oracle.kv.impl.api.parallelscan.ShardScanIterator;
import oracle.kv.impl.async.AsyncTableIterator;
import oracle.kv.impl.async.IterationHandleNotifier;
import oracle.kv.impl.security.AuthContext;
import oracle.kv.impl.topo.RepGroupId;
import oracle.kv.impl.util.contextlogger.LogContext;
import oracle.kv.query.ExecuteOptions;
import oracle.kv.table.KeyPair;
import oracle.kv.table.MultiGetResult;
import oracle.kv.table.MultiRowOptions;
import oracle.kv.table.Row;
import oracle.kv.table.TableIteratorOptions;
/**
* Implementation of a scatter-gather iterator for secondary indexes. The
* iterator will access the store by shards.
 * {@code ShardIndexStream} is used to read a single shard.
* <p>
* Discussion of inclusive/exclusive iterations
* <p>
* Each request sent to the server side needs a start or resume key and an
* optional end key. By default these are inclusive. A {@code FieldRange}
* object may be included to exercise fine control over start/end values for
* range queries. {@code FieldRange} indicates whether the values are inclusive
* or exclusive. {@code FieldValue} objects are typed so the
* inclusive/exclusive state is handled here (on the client side) where they
* can be controlled per-type rather than on the server where they are simple
* {@code byte[]}. This means that the start/end/resume keys are always
* inclusive on the server side.
*/
public class IndexScan {
/*
 * Orders serialized primary keys in Key byte order; used to break ties when
 * two streams return entries with equal index keys.
 */
static final Comparator<byte[]> KEY_BYTES_COMPARATOR =
    new Key.BytesComparator();

/* Prevent construction */
private IndexScan() {}
/**
 * Creates a table iterator returning ordered rows, scanning all shards.
 *
 * @param tableAPI the table API used to access the store
 * @param indexKey the (possibly partial) index key defining the scan
 * @param mro optional multi-row options (ancestor/child table returns, range)
 * @param tio optional iterator options (direction, batch size, consistency)
 * @param iterHandleNotifier notifier used for asynchronous iteration
 *
 * @return a table iterator
 */
static AsyncTableIterator<Row> createTableIterator(
    final TableAPIImpl tableAPI,
    final IndexKeyImpl indexKey,
    final MultiRowOptions mro,
    final TableIteratorOptions tio,
    final IterationHandleNotifier iterHandleNotifier) {

    /* A null shard set means "scan every shard". */
    return createTableIterator(tableAPI, indexKey, mro, tio, null,
                               iterHandleNotifier);
}
/**
 * Creates a table iterator returning ordered rows, restricted to the given
 * shard set (null means all shards). The returned iterator merges per-shard
 * streams; ordering and duplicate elimination are handled by the nested
 * IndexRowScanStream.
 *
 * @param tableAPI the table API used to access the store
 * @param indexKey the (possibly partial) index key defining the scan
 * @param mro optional multi-row options (ancestor/child table returns, range)
 * @param tio optional iterator options (direction, batch size, consistency)
 * @param shardSet the shards to scan, or null for all shards
 * @param iterHandleNotifier notifier used for asynchronous iteration
 *
 * @return a table iterator
 */
static AsyncTableIterator<Row> createTableIterator(
    final TableAPIImpl tableAPI,
    final IndexKeyImpl indexKey,
    final MultiRowOptions mro,
    final TableIteratorOptions tio,
    final Set<RepGroupId> shardSet,
    final IterationHandleNotifier iterHandleNotifier) {

    final TargetTables targetTables =
        TableAPIImpl.makeTargetTables(indexKey.getTable(), mro);

    final IndexImpl index = (IndexImpl) indexKey.getIndex();
    final TableImpl table = index.getTable();
    final IndexRange range = new IndexRange(indexKey, mro, tio);
    /* Whether a multikey index may return the same row more than once. */
    final boolean needDupElim = needDupElimination(indexKey);

    ExecuteOptions options = new ExecuteOptions(tio);

    Direction dir = (tio != null ? tio.getDirection() : Direction.FORWARD);

    return new ShardScanIterator<Row>(tableAPI.getStore(),
                                      null, // baseTopology
                                      options,
                                      dir,
                                      shardSet,
                                      iterHandleNotifier,
                                      true /*isTopologyListener*/) {
        @Override
        protected ShardStream createStream(RepGroupId groupId) {
            return new IndexRowScanStream(groupId);
        }

        @Override
        protected InternalOperation createOp(byte[] resumeSecondaryKey,
                                             byte[] resumePrimaryKey) {
            return new IndexIterate(index.getName(),
                                    targetTables,
                                    range,
                                    resumeSecondaryKey,
                                    resumePrimaryKey,
                                    batchSize,
                                    0 /* maxReadKB */,
                                    1 /* emptyReadFactor */);
        }

        @Override
        protected void convertResult(Result result, List<Row> rows) {
            convertResultRows(tableAPI, table, targetTables, result, rows);
        }

        @Override
        protected byte[] extractResumeSecondaryKey(Result result) {
            return result.getSecondaryResumeKey();
        }

        @Override
        protected int compare(Row one, Row two) {
            // Ordering is done by IndexRowScanStream.compareInternal, never here.
            throw new IllegalStateException("Unexpected call");
        }

        /**
         * IndexRowScanStream subclasses ShardIndexStream in order to
         * implement correct ordering of the streams used by an
         * IndexRowScanIterator. Specifically, the problem is that
         * IndexRowScanIterator returns Row objs, and as a result
         * IndexRowScanIterator.compare(), which compares Rows, does not
         * do correct ordering. Instead we must compare index keys. If
         * two index keys (from different shards) are equal, then the
         * associated primary keys are also compared, to make sure that 2
         * streams will never have the same order magnitude (the only way
         * that 2 streams may both return the same index-key, primary-key
         * pair is when both streams retrieve the same row from multiple
         * shards in the event of partition migration.
         */
        class IndexRowScanStream extends ShardStream {

            // Primary keys seen so far; used only when needDupElim is true.
            HashSet<BinaryValueImpl> thePrimKeysSet;

            IndexRowScanStream(RepGroupId groupId) {
                super(groupId, null, null);
                if (needDupElim) {
                    thePrimKeysSet = new HashSet<BinaryValueImpl>(1000);
                }
            }

            @Override
            protected void setResumeKey(Result result) {

                super.setResumeKey(result);

                if (!needDupElim) {
                    return;
                }

                // Drop any result row whose primary key was already returned.
                ListIterator<ResultIndexRows> listIter =
                    result.getIndexRowList().listIterator();

                while (listIter.hasNext()) {
                    ResultIndexRows indexRow = listIter.next();

                    BinaryValueImpl binPrimKey =
                        FieldDefImpl.Constants.binaryDef.
                        createBinary(indexRow.getKeyBytes());

                    boolean added = thePrimKeysSet.add(binPrimKey);

                    if (!added) {
                        listIter.remove();
                    }
                }
            }

            @Override
            protected int compareInternal(Stream o) {

                IndexRowScanStream other = (IndexRowScanStream)o;

                ResultIndexRows res1 =
                    currentResultSet.getIndexRowList().
                    get(currentResultPos);

                ResultIndexRows res2 =
                    other.currentResultSet.getIndexRowList().
                    get(other.currentResultPos);

                byte[] key1 = res1.getIndexKeyBytes();
                byte[] key2 = res2.getIndexKeyBytes();

                // Compare index keys first; fall back to primary keys on a
                // tie so that no two streams ever compare equal.
                int cmp = IndexImpl.compareUnsignedBytes(key1, key2);

                if (cmp == 0) {
                    cmp = KEY_BYTES_COMPARATOR.compare(res1.getKeyBytes(),
                                                       res2.getKeyBytes());
                }

                return itrDirection == Direction.FORWARD ? cmp : (cmp * -1);
            }
        }
    };
}
/**
 * Checks whether elimination of duplicate table rows is needed. This can be
 * true only if the index is multikey. For example, let "array" be a table
 * column that is an array of ints, and consider a search for rows whose
 * "array" contains a value &gt; 10. Since the index contains an entry for
 * each value of "array", a given row may contain many values &gt; 10 in its
 * "array" and would then be returned once per matching entry unless
 * duplicates are eliminated. Even then, no elimination is needed
 * in the following case:
 *
 * Let R be the set of index entries that satisfy the search conditions.
 * If all entries in R have the same index key (not including the prim
 * key columns), then there cannot be 2 entries in R that contain the same
 * prim key (i.e. point to the same table row). This is because at the JE
 * level, the index key includes both the declared index fields and the prim
 * key columns, and these "physical" keys must be unique.
 *
 * The above case can arise in 2 situations:
 * - All the multi-key fields have equality conditions on them.
 * - The index is a MapBoth index and there is an equality condition on the
 *   map-key field.
 */
private static boolean needDupElimination(IndexKeyImpl key) {

    IndexImpl index = (IndexImpl)key.getIndex();

    // A single-key index, or a fully-specified key, can never produce the
    // same primary key twice (see method comment).
    if (!index.isMultiKey() || key.isComplete()) {
        return false;
    }

    // Empty key: every entry of a multikey index qualifies, so dups possible.
    if (key.size() == 0) {
        return true;
    }

    List<IndexImpl.IndexField> ipaths = index.getIndexFields();

    /*
     * If the index is a MapBoth one, and the map-key field is set in the
     * index key, no dup elim is needed.
     */
    if (index.isMapBothIndex()) {

        for (int i = 0; i < key.size(); ++i) {
            if (ipaths.get(i).isMapKeys()) {
                return false;
            }
        }
    }

    /*
     * If any of the index fields that are not set in the index key are
     * multi-key fields, dup elim is needed.
     */
    for (int i = key.size(); i < index.numFields(); ++i) {
        if (ipaths.get(i).isMultiKey()) {
            return true;
        }
    }

    return false;
}
    /**
     * Creates a table iterator returning ordered key pairs.
     *
     * @param apiImpl the table API implementation used to reach the store
     * @param indexKey the (possibly partial) index search key
     * @param mro multi-row options (ancestor/child table returns); may be null
     * @param tio iteration options (direction, batch size); may be null
     * @param iterHandleNotifier notifier for async iteration handles
     * @return a table iterator over (index key, primary key) pairs
     */
    static AsyncTableIterator<KeyPair> createTableKeysIterator(
        final TableAPIImpl apiImpl,
        final IndexKeyImpl indexKey,
        final MultiRowOptions mro,
        final TableIteratorOptions tio,
        final IterationHandleNotifier iterHandleNotifier) {
        final TargetTables targetTables =
            TableAPIImpl.makeTargetTables(indexKey.getTable(), mro);
        final IndexImpl index = (IndexImpl) indexKey.getIndex();
        final IndexRange range = new IndexRange(indexKey, mro, tio);
        ExecuteOptions options = new ExecuteOptions(tio);
        /* Default to a forward scan when no options are supplied. */
        Direction dir = (tio != null ? tio.getDirection() : Direction.FORWARD);
        return new ShardScanIterator<KeyPair>(apiImpl.getStore(),
            null, // baseTopology
            options,
            dir,
            null,
            iterHandleNotifier,
            true /* isTopologyListener */) {
            @Override
            protected ShardStream createStream(RepGroupId groupId) {
                return new IndexKeyScanStream(groupId);
            }
            @Override
            protected InternalOperation createOp(byte[] resumeSecondaryKey,
                byte[] resumePrimaryKey) {
                /* One keys-only batch; no read-KB cap (maxReadKB == 0). */
                return new IndexKeysIterate(index.getName(),
                    targetTables,
                    range,
                    resumeSecondaryKey,
                    resumePrimaryKey,
                    batchSize,
                    0, /* maxReadKB */
                    1 /* emptyReadFactor */);
            }
            /**
             * Convert the results to KeyPair instances. Note that in the
             * case where ancestor and/or child table returns are requested
             * the IndexKey returned is based on the index and the table
             * containing the index, but the PrimaryKey returned may be from
             * a different, ancestor or child table.
             */
            @Override
            protected void convertResult(Result result,
                List<KeyPair> elementList) {
                convertResultKeyPairs(index, targetTables, result, elementList);
            }
            /**
             * IndexKeyScanStream exists so that the index and primary keys
             * from resulting KeyPair instances can be compared in binary
             * format for sorting. With the addition of JSON support it is
             * possible for the value types in the IndexKey to be different
             * from one row to the next and the FieldValue instances will not
             * compare across types.
             *
             * This comparison ensures the same sort order as in the
             * database.
             */
            class IndexKeyScanStream extends ShardStream {
                IndexKeyScanStream(RepGroupId groupId) {
                    super(groupId, null, null);
                }
                @Override
                protected int compareInternal(Stream o) {
                    IndexKeyScanStream other = (IndexKeyScanStream)o;
                    /* Compare the heads of the two shard streams. */
                    ResultIndexKeys res1 =
                        currentResultSet.getIndexKeyList().
                        get(currentResultPos);
                    ResultIndexKeys res2 =
                        other.currentResultSet.getIndexKeyList().
                        get(other.currentResultPos);
                    byte[] key1 = res1.getIndexKeyBytes();
                    byte[] key2 = res2.getIndexKeyBytes();
                    /* Index-key bytes first; primary key breaks ties. */
                    int cmp = IndexImpl.compareUnsignedBytes(key1, key2);
                    if (cmp == 0) {
                        cmp = KEY_BYTES_COMPARATOR.compare(
                            res1.getPrimaryKeyBytes(),
                            res2.getPrimaryKeyBytes());
                    }
                    /* Invert the ordering for a reverse scan. */
                    return itrDirection == Direction.FORWARD ? cmp : (cmp * -1);
                }
            }
            @Override
            protected int compare(KeyPair one, KeyPair two) {
                /* Ordering is done in IndexKeyScanStream, never here. */
                throw new IllegalStateException("Unexpected call");
            }
        };
    }
static MultiGetResult<Row> multiGet(TableAPIImpl apiImpl,
IndexKeyImpl indexKey,
byte[] continuationKey,
MultiRowOptions mro,
TableIteratorOptions tio) {
return new ShardMultiGetHandler(apiImpl, indexKey, continuationKey,
mro, tio)
.execute();
}
static CompletableFuture<MultiGetResult<Row>>
multiGetAsync(TableAPIImpl apiImpl,
IndexKeyImpl indexKey,
byte[] continuationKey,
MultiRowOptions mro,
TableIteratorOptions tio) {
try {
return new ShardMultiGetHandler(apiImpl, indexKey,
continuationKey, mro, tio)
.executeAsync();
} catch (Throwable e) {
return failedFuture(e);
}
}
private static class ShardMultiGetHandler
extends BasicShardMultiGetHandler<Row> {
ShardMultiGetHandler(TableAPIImpl apiImpl,
IndexKeyImpl indexKey,
byte[] continuationKey,
MultiRowOptions mro,
TableIteratorOptions tio) {
super(apiImpl, indexKey, continuationKey, mro, tio);
}
@Override
InternalOperation createIterateOp(int batchSize,
int readKBLimit,
int emptyReadFactor) {
return new IndexIterate(index.getName(),
targetTables,
range,
resumeSecondaryKey,
resumePrimaryKey,
batchSize,
readKBLimit,
emptyReadFactor);
}
@Override
void convertResult(Result result) {
convertResultRows(apiImpl, table, targetTables, result, rows);
}
}
static MultiGetResult<KeyPair> multiGetKeys(TableAPIImpl apiImpl,
IndexKeyImpl indexKey,
byte[] continuationKey,
MultiRowOptions mro,
TableIteratorOptions tio) {
return new ShardMultiGetKeysHandler(apiImpl, indexKey, continuationKey,
mro, tio)
.execute();
}
static CompletableFuture<MultiGetResult<KeyPair>>
multiGetKeysAsync(TableAPIImpl apiImpl,
IndexKeyImpl indexKey,
byte[] continuationKey,
MultiRowOptions mro,
TableIteratorOptions tio) {
try {
return new ShardMultiGetKeysHandler(apiImpl, indexKey,
continuationKey, mro, tio)
.executeAsync();
} catch (Throwable e) {
return failedFuture(e);
}
}
private static class ShardMultiGetKeysHandler
extends BasicShardMultiGetHandler<KeyPair> {
ShardMultiGetKeysHandler(TableAPIImpl apiImpl,
IndexKeyImpl indexKey,
byte[] continuationKey,
MultiRowOptions mro,
TableIteratorOptions tio) {
super(apiImpl, indexKey, continuationKey, mro, tio);
}
@Override
InternalOperation createIterateOp(int batchSize,
int readKBLimit,
int emptyReadFactor) {
return new IndexKeysIterate(index.getName(),
targetTables,
range,
resumeSecondaryKey,
resumePrimaryKey,
batchSize,
readKBLimit,
emptyReadFactor);
}
@Override
void convertResult(Result result) {
convertResultKeyPairs(index, targetTables, result, rows);
}
}
private static void convertResultRows(TableAPIImpl apiImpl,
TableImpl table,
TargetTables targetTables,
Result result,
List<Row> rows) {
final List<ResultIndexRows> indexRowList = result.getIndexRowList();
for (ResultIndexRows indexRow : indexRowList) {
Row converted = convertRow(apiImpl, targetTables, table, indexRow);
rows.add(converted);
}
}
/**
* Converts a single key value into a row.
*/
private static Row convertRow(TableAPIImpl apiImpl,
TargetTables targetTables,
TableImpl table,
ResultIndexRows rowResult) {
/*
* If ancestor table returns may be involved, start at the
* top level table of this hierarchy.
*/
final TableImpl startingTable =
targetTables.hasAncestorTables() ?
table.getTopLevelTable() : table;
final RowImpl fullKey = startingTable.createRowFromKeyBytes(
rowResult.getKeyBytes());
if (fullKey == null) {
throw new IllegalStateException
("Unable to deserialize a row from an index result");
}
final ValueVersion vv =
new ValueVersion(rowResult.getValue(),
rowResult.getVersion());
RowImpl row =
apiImpl.getRowFromValueVersion(
vv,
fullKey,
rowResult.getExpirationTime(),
rowResult.getModificationTime(),
false,
false);
return row;
}
private static void convertResultKeyPairs(IndexImpl index,
TargetTables targetTables,
Result result,
List<KeyPair> keyPairs) {
final TableImpl table = index.getTable();
final List<ResultIndexKeys> results =
result.getIndexKeyList();
for (ResultIndexKeys res : results) {
final IndexKeyImpl indexKeyImpl =
convertIndexKey(index, res.getIndexKeyBytes());
final PrimaryKeyImpl pkey = convertPrimaryKey(table, targetTables,
res);
if (indexKeyImpl != null && pkey != null) {
keyPairs.add(new KeyPair(pkey, indexKeyImpl));
} else {
keyPairs.add(null);
}
}
}
    /**
     * Deserializes the serialized index key into an IndexKeyImpl.
     * The boolean argument disallows partial keys.
     * NOTE(review): callers treat a null return as "could not deserialize";
     * confirm against IndexImpl.deserializeIndexKey.
     */
    private static IndexKeyImpl convertIndexKey(IndexImpl index, byte[] bytes) {
        /* don't allow partial keys */
        return index.deserializeIndexKey(bytes, false);
    }
private static PrimaryKeyImpl convertPrimaryKey(TableImpl table,
TargetTables targetTables,
ResultIndexKeys res) {
/*
* If ancestor table returns may be involved, start at the
* top level table of this hierarchy.
*/
final TableImpl startingTable =
targetTables.hasAncestorTables() ?
table.getTopLevelTable() : table;
final PrimaryKeyImpl pkey = startingTable.
createPrimaryKeyFromKeyBytes(res.getPrimaryKeyBytes());
pkey.setExpirationTime(res.getExpirationTime());
return pkey;
}
    /**
     * A handler to fetch matching rows shard by shard. Each call to
     * {@link #execute} (or {@link #executeAsync}) returns at most one
     * "batch" of results, bounded by batchResultSize and/or maxReadKB,
     * together with an opaque continuation key that encodes where to
     * resume: the current shard id plus the secondary/primary resume keys
     * within that shard.
     */
    private static abstract class BasicShardMultiGetHandler<T> {
        /* Inputs fixed at construction time. */
        final TableAPIImpl apiImpl;
        final KVStoreImpl store;
        final RepGroupId[] repGroupIds;
        final IndexImpl index;
        final TableImpl table;
        final byte[] continuationKey;
        final TargetTables targetTables;
        final IndexRange range;
        final Consistency consistency;
        final long requestTimeout;
        final TimeUnit timeoutUnit;
        final int batchResultSize;
        final int maxReadKB;
        private LogContext lc;
        private AuthContext auth;
        /* Per-operation limits, shrunk as the batch fills up. */
        private int opBatchSize;
        private int opMaxReadKB;
        /* Accumulated results and iteration position. */
        final List<T> rows = new ArrayList<T>();
        byte[] resumeSecondaryKey = null;
        byte[] resumePrimaryKey = null;
        private RepGroupId groupId;
        private int numRead = 0;
        private int readKB = 0;
        private int writeKB = 0;
        private byte[] contdKey = null;
        private boolean noCharge = false;
        BasicShardMultiGetHandler(TableAPIImpl apiImpl,
                                  IndexKeyImpl key,
                                  byte[] continuationKey,
                                  MultiRowOptions mro,
                                  TableIteratorOptions tio) {
            this.apiImpl = apiImpl;
            store = apiImpl.getStore();
            Set<RepGroupId> rgids = store.getTopology().getRepGroupIds();
            repGroupIds = rgids.toArray(new RepGroupId[rgids.size()]);
            index = key.getIndexImpl();
            table = key.getTable();
            this.continuationKey = continuationKey;
            targetTables = TableAPIImpl.makeTargetTables(key.getTable(), mro);
            range = new IndexRange(key, mro, tio);
            consistency = TableAPIImpl.getConsistency(tio);
            requestTimeout = TableAPIImpl.getTimeout(tio);
            timeoutUnit = TableAPIImpl.getTimeoutUnit(tio);
            batchResultSize = TableAPIImpl.getBatchSize(tio);
            maxReadKB = TableAPIImpl.getMaxReadKB(tio);
            /* Initially the per-op limits are the whole-batch limits. */
            opBatchSize = batchResultSize;
            opMaxReadKB = maxReadKB;
            if (tio != null) {
                lc = tio.getLogContext();
                auth = tio.getAuthContext();
                noCharge = tio.getNoCharge();
            }
        }
        /* Abstract method to create IndexIterate operation */
        abstract InternalOperation createIterateOp(int batchSize,
                                                   int readKBLimit,
                                                   int emptyReadFactor);
        /* Abstract method to convert to the results */
        abstract void convertResult(Result result);
        /**
         * Runs the iteration synchronously until one batch is complete,
         * then returns it.
         */
        MultiGetResult<T> execute() {
            initIteration();
            while (true) {
                final Request request = createRequest();
                final Result result = store.executeRequest(request);
                if (processResult(result)) {
                    break;
                }
            }
            return createResult();
        }
        /**
         * Initializes the groupId, resumeSecondaryKey, and resumePrimaryKey
         * fields for the start of the iteration.
         */
        private void initIteration() {
            if (continuationKey != null && continuationKey.length > 0) {
                /*
                 * Extract the shard id, resume secondary key and resume
                 * primary key.
                 *
                 * NOTE(review): each length is read from a single signed
                 * byte (see genContinuationKey), so keys longer than 127
                 * bytes would corrupt this encoding — confirm upstream
                 * guarantees on key length.
                 */
                int pos = 0;
                int gid = continuationKey[pos++];
                if (gid < 1 || gid > repGroupIds.length) {
                    throw new IllegalArgumentException("Invalid shard id " +
                        "in continuation key: " + gid);
                }
                groupId = new RepGroupId(gid);
                if (continuationKey.length > 1) {
                    int len = continuationKey[pos++];
                    if (len > 0) {
                        resumeSecondaryKey = Arrays.copyOfRange(
                            continuationKey, pos, pos + len);
                        pos += len;
                        assert(pos < continuationKey.length);
                        len = continuationKey[pos++];
                        resumePrimaryKey = Arrays.copyOfRange(
                            continuationKey, pos, pos + len);
                    }
                }
            } else {
                /* No continuation key: start at the first shard. */
                groupId = getNextRepGroup(null);
            }
        }
        /**
         * Creates a request to get the next batch of results from the current
         * shard and resume keys.
         */
        private Request createRequest() {
            /*
             * Charge an empty read only when nothing has been read yet and
             * this is the last shard, i.e. the whole call may end up empty.
             */
            final int emptyReadFactor = (readKB == 0 &&
                groupId.getGroupId() == repGroupIds.length) ? 1 : 0;
            final InternalOperation op = createIterateOp(opBatchSize,
                                                         opMaxReadKB,
                                                         emptyReadFactor);
            final Request req =
                store.makeReadRequest(op, groupId, consistency,
                                      requestTimeout, timeoutUnit);
            req.setLogContext(lc);
            req.setAuthContext(auth);
            req.setNoCharge(noCharge);
            return req;
        }
        /**
         * Process the results of a single server operation, updating fields
         * with the progress of the iteration. Returns whether the iteration
         * is done. Returns true if iteration is complete for now and the
         * result should be returned, and false if the iteration should
         * continue.
         */
        private boolean processResult(Result result) {
            numRead += result.getNumRecords();
            readKB += result.getReadKB();
            writeKB += result.getWriteKB();
            if (result.getNumRecords() > 0) {
                convertResult(result);
                resumeSecondaryKey = result.getSecondaryResumeKey();
                resumePrimaryKey = result.getPrimaryResumeKey();
            }
            /*
             * Stop fetching if still has more element to fetch from current
             * shard.
             */
            if (result.hasMoreElements()) {
                contdKey = genContinuationKey(groupId, resumeSecondaryKey,
                                              resumePrimaryKey);
                return true;
            }
            /*
             * Move to next shard
             */
            groupId = getNextRepGroup(groupId);
            if (groupId == null) {
                /* No more shards: iteration is fully complete. */
                return true;
            }
            /*
             * If maxReadKB is specified, check the actual read cost and
             * stop fetching if current read cost has reached the maxReadKB,
             * the continuation key points to the beginning of the current
             * RepGroup.
             */
            if (maxReadKB != 0) {
                if (readKB >= maxReadKB) {
                    contdKey = genContinuationKey(groupId, null, null);
                    return true;
                }
                opMaxReadKB = maxReadKB - readKB;
            }
            /*
             * If batchResultSize is specified, check on the number of rows
             * fetched and stop fetching if the number of rows has reached
             * the batchResultSize, the continuation key points to the
             * beginning of the current RepGroup.
             */
            if (batchResultSize != 0) {
                if (numRead >= batchResultSize) {
                    contdKey = genContinuationKey(groupId, null, null);
                    return true;
                }
                opBatchSize = batchResultSize - numRead;
            }
            /* Resume keys belong to the previous shard; clear them. */
            if (resumeSecondaryKey != null) {
                resumeSecondaryKey = null;
                resumePrimaryKey = null;
            }
            return false;
        }
        /** Returns the result object that should be returned. */
        private MultiGetResult<T> createResult() {
            return new MultiGetResult<T>(rows, contdKey, readKB, writeKB);
        }
        /** Executes the iteration and returns the result asynchronously. */
        CompletableFuture<MultiGetResult<T>> executeAsync() {
            try {
                initIteration();
                final CompletableFuture<MultiGetResult<T>> future =
                    new CompletableFuture<>();
                /*
                 * Self-rescheduling callback: each completed request either
                 * finishes the future or issues the next request.
                 */
                class ExecuteAsyncHandler
                        implements BiConsumer<Result, Throwable> {
                    void execute() {
                        store.executeRequestAsync(createRequest())
                            .whenComplete(unwrapExceptionVoid(this));
                    }
                    @Override
                    public void accept(Result result, Throwable exception) {
                        try {
                            if (exception != null) {
                                future.completeExceptionally(exception);
                            } else if (processResult(result)) {
                                future.complete(createResult());
                            } else {
                                /* TODO: Use dialog layer executor */
                                CompletableFuture.runAsync(this::execute);
                            }
                        } catch (Throwable t) {
                            future.completeExceptionally(t);
                        }
                    }
                }
                new ExecuteAsyncHandler().execute();
                return future;
            } catch (Throwable t) {
                return failedFuture(t);
            }
        }
        /**
         * Generates the continuation key:
         * <repGroupid, resumeSecondaryKey, resumePrimaryKey>
         * Each component after the group id is preceded by a single length
         * byte (see the NOTE in initIteration about the 127-byte limit).
         */
        private static byte[] genContinuationKey(RepGroupId repGroupId,
                                                 byte[] resumeSecondKey,
                                                 byte[] resumePrimaryKey) {
            int len = (resumeSecondKey != null ?
                       resumeSecondKey.length + 1 : 0) +
                      (resumePrimaryKey != null ?
                       resumePrimaryKey.length + 1 : 0) + 1;
            final byte[] bytes = new byte[len];
            int pos = 0;
            /* repGroupId */
            bytes[pos++] = (byte)repGroupId.getGroupId();
            if (resumeSecondKey != null) {
                /* resumeSecondaryKey */
                bytes[pos++] = (byte)resumeSecondKey.length;
                System.arraycopy(resumeSecondKey, 0, bytes, pos,
                                 resumeSecondKey.length);
                pos += resumeSecondKey.length;
                /* resumePrimaryKey */
                if (resumePrimaryKey != null) {
                    bytes[pos++] = (byte)resumePrimaryKey.length;
                    System.arraycopy(resumePrimaryKey, 0, bytes, pos,
                                     resumePrimaryKey.length);
                }
            }
            return bytes;
        }
        /**
         * Returns the rep group that follows the given one; if the input
         * RepGroupId is null, returns the first rep group, and if the input
         * is the last rep group, returns null. Relies on group ids being
         * contiguous starting at 1 (the id is used as an array index).
         */
        private RepGroupId getNextRepGroup(RepGroupId repGroupId) {
            if (repGroupId == null) {
                return repGroupIds[0];
            }
            if (repGroupId.getGroupId() == repGroupIds.length) {
                return null;
            }
            return repGroupIds[repGroupId.getGroupId()];
        }
    }
}
|
googleapis/google-cloud-java | 35,706 | java-datastream/proto-google-cloud-datastream-v1alpha1/src/main/java/com/google/cloud/datastream/v1alpha1/MysqlDatabase.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1alpha1/datastream_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1alpha1;
/**
*
*
* <pre>
* MySQL database.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.MysqlDatabase}
*/
public final class MysqlDatabase extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1alpha1.MysqlDatabase)
MysqlDatabaseOrBuilder {
private static final long serialVersionUID = 0L;
  // Generated by protoc (see file header); instantiation and descriptor
  // plumbing for the MysqlDatabase message.
  // Use MysqlDatabase.newBuilder() to construct.
  private MysqlDatabase(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private MysqlDatabase() {
    databaseName_ = "";
    mysqlTables_ = java.util.Collections.emptyList();
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MysqlDatabase();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
        .internal_static_google_cloud_datastream_v1alpha1_MysqlDatabase_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
        .internal_static_google_cloud_datastream_v1alpha1_MysqlDatabase_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datastream.v1alpha1.MysqlDatabase.class,
            com.google.cloud.datastream.v1alpha1.MysqlDatabase.Builder.class);
  }
  public static final int DATABASE_NAME_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; the decoded String form is
  // cached lazily by the accessors below (standard protoc pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object databaseName_ = "";

  /**
   *
   *
   * <pre>
   * Database name.
   * </pre>
   *
   * <code>string database_name = 1;</code>
   *
   * @return The databaseName.
   */
  @java.lang.Override
  public java.lang.String getDatabaseName() {
    java.lang.Object ref = databaseName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so later calls avoid re-decoding.
      databaseName_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Database name.
   * </pre>
   *
   * <code>string database_name = 1;</code>
   *
   * @return The bytes for databaseName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDatabaseNameBytes() {
    java.lang.Object ref = databaseName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      databaseName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int MYSQL_TABLES_FIELD_NUMBER = 2;

  // Repeated message field; generated accessors below delegate to this list.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.datastream.v1alpha1.MysqlTable> mysqlTables_;

  /**
   *
   *
   * <pre>
   * Tables in the database.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.MysqlTable mysql_tables = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.datastream.v1alpha1.MysqlTable> getMysqlTablesList() {
    return mysqlTables_;
  }

  /**
   *
   *
   * <pre>
   * Tables in the database.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.MysqlTable mysql_tables = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder>
      getMysqlTablesOrBuilderList() {
    return mysqlTables_;
  }

  /**
   *
   *
   * <pre>
   * Tables in the database.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.MysqlTable mysql_tables = 2;</code>
   */
  @java.lang.Override
  public int getMysqlTablesCount() {
    return mysqlTables_.size();
  }

  /**
   *
   *
   * <pre>
   * Tables in the database.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.MysqlTable mysql_tables = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.MysqlTable getMysqlTables(int index) {
    return mysqlTables_.get(index);
  }

  /**
   *
   *
   * <pre>
   * Tables in the database.
   * </pre>
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.MysqlTable mysql_tables = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder getMysqlTablesOrBuilder(
      int index) {
    return mysqlTables_.get(index);
  }
  // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Wire serialization: field 1 (string) is skipped when empty, field 2
  // (repeated message) writes one record per element.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, databaseName_);
    }
    for (int i = 0; i < mysqlTables_.size(); i++) {
      output.writeMessage(2, mysqlTables_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(databaseName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, databaseName_);
    }
    for (int i = 0; i < mysqlTables_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, mysqlTables_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over all fields plus unknown fields; hashCode is
  // memoized and derived from the same fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datastream.v1alpha1.MysqlDatabase)) {
      return super.equals(obj);
    }
    com.google.cloud.datastream.v1alpha1.MysqlDatabase other =
        (com.google.cloud.datastream.v1alpha1.MysqlDatabase) obj;

    if (!getDatabaseName().equals(other.getDatabaseName())) return false;
    if (!getMysqlTablesList().equals(other.getMysqlTablesList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DATABASE_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getDatabaseName().hashCode();
    if (getMysqlTablesCount() > 0) {
      hash = (37 * hash) + MYSQL_TABLES_FIELD_NUMBER;
      hash = (53 * hash) + getMysqlTablesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER or the
  // GeneratedMessageV3 parse helpers.
  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1alpha1.MysqlDatabase parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods; building from the default instance reuses a
  // fresh Builder rather than copying state.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.datastream.v1alpha1.MysqlDatabase prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* MySQL database.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.MysqlDatabase}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1alpha1.MysqlDatabase)
com.google.cloud.datastream.v1alpha1.MysqlDatabaseOrBuilder {
    // Builder-side descriptor plumbing and constructors (protoc output).
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1alpha1_MysqlDatabase_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1alpha1_MysqlDatabase_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datastream.v1alpha1.MysqlDatabase.class,
              com.google.cloud.datastream.v1alpha1.MysqlDatabase.Builder.class);
    }

    // Construct using com.google.cloud.datastream.v1alpha1.MysqlDatabase.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields to their defaults; bit 0x00000002 tracks whether the
    // repeated mysqlTables_ list is locally owned/mutable.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      databaseName_ = "";
      if (mysqlTablesBuilder_ == null) {
        mysqlTables_ = java.util.Collections.emptyList();
      } else {
        mysqlTables_ = null;
        mysqlTablesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1alpha1_MysqlDatabase_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.datastream.v1alpha1.MysqlDatabase getDefaultInstanceForType() {
      return com.google.cloud.datastream.v1alpha1.MysqlDatabase.getDefaultInstance();
    }

    // build() enforces initialization; buildPartial() copies the set fields
    // into a new message without that check (protoc pattern).
    @java.lang.Override
    public com.google.cloud.datastream.v1alpha1.MysqlDatabase build() {
      com.google.cloud.datastream.v1alpha1.MysqlDatabase result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.datastream.v1alpha1.MysqlDatabase buildPartial() {
      com.google.cloud.datastream.v1alpha1.MysqlDatabase result =
          new com.google.cloud.datastream.v1alpha1.MysqlDatabase(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartialRepeatedFields(
        com.google.cloud.datastream.v1alpha1.MysqlDatabase result) {
      if (mysqlTablesBuilder_ == null) {
        // Freeze the locally-owned list before handing it to the message.
        if (((bitField0_ & 0x00000002) != 0)) {
          mysqlTables_ = java.util.Collections.unmodifiableList(mysqlTables_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.mysqlTables_ = mysqlTables_;
      } else {
        result.mysqlTables_ = mysqlTablesBuilder_.build();
      }
    }

    private void buildPartial0(com.google.cloud.datastream.v1alpha1.MysqlDatabase result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.databaseName_ = databaseName_;
      }
    }
    // Plain delegations to GeneratedMessageV3.Builder (generated boilerplate).
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Merge semantics: non-empty scalar fields overwrite; repeated fields
    // are concatenated (standard protobuf merge behavior).
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.datastream.v1alpha1.MysqlDatabase) {
        return mergeFrom((com.google.cloud.datastream.v1alpha1.MysqlDatabase) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.datastream.v1alpha1.MysqlDatabase other) {
      if (other == com.google.cloud.datastream.v1alpha1.MysqlDatabase.getDefaultInstance())
        return this;
      if (!other.getDatabaseName().isEmpty()) {
        databaseName_ = other.databaseName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (mysqlTablesBuilder_ == null) {
        if (!other.mysqlTables_.isEmpty()) {
          if (mysqlTables_.isEmpty()) {
            // Adopt the other message's (immutable) list directly.
            mysqlTables_ = other.mysqlTables_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureMysqlTablesIsMutable();
            mysqlTables_.addAll(other.mysqlTables_);
          }
          onChanged();
        }
      } else {
        if (!other.mysqlTables_.isEmpty()) {
          if (mysqlTablesBuilder_.isEmpty()) {
            mysqlTablesBuilder_.dispose();
            mysqlTablesBuilder_ = null;
            mysqlTables_ = other.mysqlTables_;
            bitField0_ = (bitField0_ & ~0x00000002);
            mysqlTablesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getMysqlTablesFieldBuilder()
                    : null;
          } else {
            mysqlTablesBuilder_.addAllMessages(other.mysqlTables_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: tag 10 = field 1 (string), tag 18 = field 2
    // (repeated message); unknown fields are preserved via the default arm.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                databaseName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.cloud.datastream.v1alpha1.MysqlTable m =
                    input.readMessage(
                        com.google.cloud.datastream.v1alpha1.MysqlTable.parser(),
                        extensionRegistry);
                if (mysqlTablesBuilder_ == null) {
                  ensureMysqlTablesIsMutable();
                  mysqlTables_.add(m);
                } else {
                  mysqlTablesBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // bitField0_: bit 0x00000001 = databaseName set; 0x00000002 = local
    // ownership of the mutable mysqlTables_ list.
    private int bitField0_;

    private java.lang.Object databaseName_ = "";
/**
*
*
* <pre>
* Database name.
* </pre>
*
* <code>string database_name = 1;</code>
*
* @return The databaseName.
*/
public java.lang.String getDatabaseName() {
java.lang.Object ref = databaseName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
databaseName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Database name.
* </pre>
*
* <code>string database_name = 1;</code>
*
* @return The bytes for databaseName.
*/
public com.google.protobuf.ByteString getDatabaseNameBytes() {
java.lang.Object ref = databaseName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
databaseName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Database name.
* </pre>
*
* <code>string database_name = 1;</code>
*
* @param value The databaseName to set.
* @return This builder for chaining.
*/
public Builder setDatabaseName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
databaseName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Database name.
* </pre>
*
* <code>string database_name = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearDatabaseName() {
databaseName_ = getDefaultInstance().getDatabaseName();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Database name.
* </pre>
*
* <code>string database_name = 1;</code>
*
* @param value The bytes for databaseName to set.
* @return This builder for chaining.
*/
public Builder setDatabaseNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
databaseName_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Backing list for mysql_tables while no field builder exists; starts as an immutable
// shared empty list and is copied on first write (bit 0x2 records "we own a mutable copy").
private java.util.List<com.google.cloud.datastream.v1alpha1.MysqlTable> mysqlTables_ =
    java.util.Collections.emptyList();

// Copy-on-write: replace the (possibly shared/immutable) list with an owned ArrayList.
private void ensureMysqlTablesIsMutable() {
  if (!((bitField0_ & 0x00000002) != 0)) {
    mysqlTables_ =
        new java.util.ArrayList<com.google.cloud.datastream.v1alpha1.MysqlTable>(mysqlTables_);
    bitField0_ |= 0x00000002;
  }
}

// Once created (lazily, by getMysqlTablesFieldBuilder), this builder owns the repeated
// field and mysqlTables_ is nulled out; all accessors then delegate to it.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.datastream.v1alpha1.MysqlTable,
        com.google.cloud.datastream.v1alpha1.MysqlTable.Builder,
        com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder>
    mysqlTablesBuilder_;
// ---- Generated accessors for repeated field mysql_tables = 2 ----
// Each method operates on the plain list while mysqlTablesBuilder_ is null, and
// delegates to the RepeatedFieldBuilderV3 once the field has entered builder mode.

/**
 * Tables in the database
 * ({@code repeated .google.cloud.datastream.v1alpha1.MysqlTable mysql_tables = 2;}).
 */
public java.util.List<com.google.cloud.datastream.v1alpha1.MysqlTable> getMysqlTablesList() {
  if (mysqlTablesBuilder_ == null) {
    return java.util.Collections.unmodifiableList(mysqlTables_);
  } else {
    return mysqlTablesBuilder_.getMessageList();
  }
}

/** Returns the number of {@code mysql_tables} elements. */
public int getMysqlTablesCount() {
  if (mysqlTablesBuilder_ == null) {
    return mysqlTables_.size();
  } else {
    return mysqlTablesBuilder_.getCount();
  }
}

/** Returns the {@code mysql_tables} element at {@code index}. */
public com.google.cloud.datastream.v1alpha1.MysqlTable getMysqlTables(int index) {
  if (mysqlTablesBuilder_ == null) {
    return mysqlTables_.get(index);
  } else {
    return mysqlTablesBuilder_.getMessage(index);
  }
}

/** Replaces the {@code mysql_tables} element at {@code index}. */
public Builder setMysqlTables(
    int index, com.google.cloud.datastream.v1alpha1.MysqlTable value) {
  if (mysqlTablesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureMysqlTablesIsMutable();
    mysqlTables_.set(index, value);
    onChanged();
  } else {
    mysqlTablesBuilder_.setMessage(index, value);
  }
  return this;
}

/** Replaces the {@code mysql_tables} element at {@code index} with the built message. */
public Builder setMysqlTables(
    int index, com.google.cloud.datastream.v1alpha1.MysqlTable.Builder builderForValue) {
  if (mysqlTablesBuilder_ == null) {
    ensureMysqlTablesIsMutable();
    mysqlTables_.set(index, builderForValue.build());
    onChanged();
  } else {
    mysqlTablesBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}

/** Appends a {@code mysql_tables} element. */
public Builder addMysqlTables(com.google.cloud.datastream.v1alpha1.MysqlTable value) {
  if (mysqlTablesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureMysqlTablesIsMutable();
    mysqlTables_.add(value);
    onChanged();
  } else {
    mysqlTablesBuilder_.addMessage(value);
  }
  return this;
}

/** Inserts a {@code mysql_tables} element at {@code index}. */
public Builder addMysqlTables(
    int index, com.google.cloud.datastream.v1alpha1.MysqlTable value) {
  if (mysqlTablesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureMysqlTablesIsMutable();
    mysqlTables_.add(index, value);
    onChanged();
  } else {
    mysqlTablesBuilder_.addMessage(index, value);
  }
  return this;
}

/** Appends the built message to {@code mysql_tables}. */
public Builder addMysqlTables(
    com.google.cloud.datastream.v1alpha1.MysqlTable.Builder builderForValue) {
  if (mysqlTablesBuilder_ == null) {
    ensureMysqlTablesIsMutable();
    mysqlTables_.add(builderForValue.build());
    onChanged();
  } else {
    mysqlTablesBuilder_.addMessage(builderForValue.build());
  }
  return this;
}

/** Inserts the built message into {@code mysql_tables} at {@code index}. */
public Builder addMysqlTables(
    int index, com.google.cloud.datastream.v1alpha1.MysqlTable.Builder builderForValue) {
  if (mysqlTablesBuilder_ == null) {
    ensureMysqlTablesIsMutable();
    mysqlTables_.add(index, builderForValue.build());
    onChanged();
  } else {
    mysqlTablesBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}

/** Appends all of {@code values} to {@code mysql_tables}. */
public Builder addAllMysqlTables(
    java.lang.Iterable<? extends com.google.cloud.datastream.v1alpha1.MysqlTable> values) {
  if (mysqlTablesBuilder_ == null) {
    ensureMysqlTablesIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, mysqlTables_);
    onChanged();
  } else {
    mysqlTablesBuilder_.addAllMessages(values);
  }
  return this;
}

/** Removes all {@code mysql_tables} elements. */
public Builder clearMysqlTables() {
  if (mysqlTablesBuilder_ == null) {
    mysqlTables_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000002);
    onChanged();
  } else {
    mysqlTablesBuilder_.clear();
  }
  return this;
}

/** Removes the {@code mysql_tables} element at {@code index}. */
public Builder removeMysqlTables(int index) {
  if (mysqlTablesBuilder_ == null) {
    ensureMysqlTablesIsMutable();
    mysqlTables_.remove(index);
    onChanged();
  } else {
    mysqlTablesBuilder_.remove(index);
  }
  return this;
}

/** Returns a mutable builder for the element at {@code index} (switches to builder mode). */
public com.google.cloud.datastream.v1alpha1.MysqlTable.Builder getMysqlTablesBuilder(
    int index) {
  return getMysqlTablesFieldBuilder().getBuilder(index);
}

/** Read-only view of the element at {@code index}, without forcing builder mode. */
public com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder getMysqlTablesOrBuilder(
    int index) {
  if (mysqlTablesBuilder_ == null) {
    return mysqlTables_.get(index);
  } else {
    return mysqlTablesBuilder_.getMessageOrBuilder(index);
  }
}

/** Read-only views of all elements, without forcing builder mode. */
public java.util.List<? extends com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder>
    getMysqlTablesOrBuilderList() {
  if (mysqlTablesBuilder_ != null) {
    return mysqlTablesBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(mysqlTables_);
  }
}

/** Appends a default-instance element and returns its builder (switches to builder mode). */
public com.google.cloud.datastream.v1alpha1.MysqlTable.Builder addMysqlTablesBuilder() {
  return getMysqlTablesFieldBuilder()
      .addBuilder(com.google.cloud.datastream.v1alpha1.MysqlTable.getDefaultInstance());
}

/** Inserts a default-instance element at {@code index} and returns its builder. */
public com.google.cloud.datastream.v1alpha1.MysqlTable.Builder addMysqlTablesBuilder(
    int index) {
  return getMysqlTablesFieldBuilder()
      .addBuilder(index, com.google.cloud.datastream.v1alpha1.MysqlTable.getDefaultInstance());
}

/** Builders for every element (switches to builder mode). */
public java.util.List<com.google.cloud.datastream.v1alpha1.MysqlTable.Builder>
    getMysqlTablesBuilderList() {
  return getMysqlTablesFieldBuilder().getBuilderList();
}

// Lazily creates the field builder; afterwards mysqlTables_ is owned by it and nulled here.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.datastream.v1alpha1.MysqlTable,
        com.google.cloud.datastream.v1alpha1.MysqlTable.Builder,
        com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder>
    getMysqlTablesFieldBuilder() {
  if (mysqlTablesBuilder_ == null) {
    mysqlTablesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datastream.v1alpha1.MysqlTable,
            com.google.cloud.datastream.v1alpha1.MysqlTable.Builder,
            com.google.cloud.datastream.v1alpha1.MysqlTableOrBuilder>(
            mysqlTables_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
    mysqlTables_ = null;
  }
  return mysqlTablesBuilder_;
}
// Generated pass-throughs so unknown fields survive round-tripping through this builder.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1alpha1.MysqlDatabase)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1alpha1.MysqlDatabase)
// Singleton default instance shared by all callers; fields carry proto3 defaults.
private static final com.google.cloud.datastream.v1alpha1.MysqlDatabase DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.datastream.v1alpha1.MysqlDatabase();
}

/** Returns the shared immutable default instance of {@code MysqlDatabase}. */
public static com.google.cloud.datastream.v1alpha1.MysqlDatabase getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: delegates to Builder.mergeFrom and, on failure, attaches the
// partially-built message to the thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<MysqlDatabase> PARSER =
    new com.google.protobuf.AbstractParser<MysqlDatabase>() {
      @java.lang.Override
      public MysqlDatabase parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so callers see a single protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Static accessor for the shared {@code MysqlDatabase} parser. */
public static com.google.protobuf.Parser<MysqlDatabase> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<MysqlDatabase> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.datastream.v1alpha1.MysqlDatabase getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.connector.testframe.testsuites;
import org.apache.flink.annotation.Experimental;
import org.apache.flink.api.common.JobID;
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.connector.source.Boundedness;
import org.apache.flink.api.connector.source.Source;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RpcOptions;
import org.apache.flink.connector.testframe.environment.ClusterControllable;
import org.apache.flink.connector.testframe.environment.TestEnvironment;
import org.apache.flink.connector.testframe.environment.TestEnvironmentSettings;
import org.apache.flink.connector.testframe.external.ExternalSystemSplitDataWriter;
import org.apache.flink.connector.testframe.external.source.DataStreamSourceExternalContext;
import org.apache.flink.connector.testframe.external.source.TestingSourceSettings;
import org.apache.flink.connector.testframe.junit.extensions.ConnectorTestingExtension;
import org.apache.flink.connector.testframe.junit.extensions.TestCaseInvocationContextProvider;
import org.apache.flink.connector.testframe.utils.CollectIteratorAssertions;
import org.apache.flink.connector.testframe.utils.MetricQuerier;
import org.apache.flink.core.execution.CheckpointingMode;
import org.apache.flink.core.execution.JobClient;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.runtime.metrics.MetricNames;
import org.apache.flink.runtime.rest.RestClient;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.api.functions.sink.v2.DiscardingSink;
import org.apache.flink.streaming.api.operators.collect.CollectResultIterator;
import org.apache.flink.streaming.api.operators.collect.CollectSinkOperatorFactory;
import org.apache.flink.streaming.api.operators.collect.CollectStreamSink;
import org.apache.flink.streaming.util.RestartStrategyUtils;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.TestLoggerExtension;
import org.apache.commons.math3.util.Precision;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.TestInstance;
import org.junit.jupiter.api.TestTemplate;
import org.junit.jupiter.api.extension.ExtendWith;
import org.opentest4j.TestAbortedException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import static java.util.Collections.singletonList;
import static java.util.concurrent.CompletableFuture.runAsync;
import static org.apache.flink.connector.testframe.utils.MetricQuerier.getJobDetails;
import static org.apache.flink.core.testutils.FlinkAssertions.assertThatFuture;
import static org.apache.flink.runtime.testutils.CommonTestUtils.terminateJob;
import static org.apache.flink.runtime.testutils.CommonTestUtils.waitForAllTaskRunning;
import static org.apache.flink.runtime.testutils.CommonTestUtils.waitForJobStatus;
import static org.apache.flink.runtime.testutils.CommonTestUtils.waitUntilCondition;
/**
* Base class for all test suites.
*
* <p>All cases should have well-descriptive JavaDoc, including:
*
* <ul>
* <li>What's the purpose of this case
* <li>Simple description of how this case works
* <li>Condition to fulfill in order to pass this case
* <li>Requirement of running this case
* </ul>
*/
@ExtendWith({
ConnectorTestingExtension.class,
TestLoggerExtension.class,
TestCaseInvocationContextProvider.class
})
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@Experimental
public abstract class SourceTestSuiteBase<T> {
private static final Logger LOG = LoggerFactory.getLogger(SourceTestSuiteBase.class);

// ----------------------------- Basic test cases ---------------------------------

/**
 * Test connector source with only one split in the external system.
 *
 * <p>This test will create one split in the external system, write test data into it, and
 * consume back via a Flink job with 1 parallelism.
 *
 * <p>The number and order of records consumed by Flink need to be identical to the test data
 * written to the external system in order to pass this test.
 *
 * <p>A bounded source is required for this test.
 */
@TestTemplate
@DisplayName("Test source with single split")
public void testSourceSingleSplit(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic)
        throws Exception {
    // Step 1: Preparation
    TestingSourceSettings sourceSettings =
            TestingSourceSettings.builder()
                    .setBoundedness(Boundedness.BOUNDED)
                    .setCheckpointingMode(semantic)
                    .build();
    TestEnvironmentSettings envSettings =
            TestEnvironmentSettings.builder()
                    .setConnectorJarPaths(externalContext.getConnectorJarPaths())
                    .build();
    Source<T, ?, ?> source = tryCreateSource(externalContext, sourceSettings);

    // Step 2: Write test data to external system
    List<T> testRecords = generateAndWriteTestData(0, externalContext, sourceSettings);

    // Step 3: Build and execute Flink job
    StreamExecutionEnvironment execEnv = testEnv.createExecutionEnvironment(envSettings);
    DataStreamSource<T> stream =
            execEnv.fromSource(source, WatermarkStrategy.noWatermarks(), "Tested Source")
                    .setParallelism(1);
    CollectIteratorBuilder<T> iteratorBuilder = addCollectSink(stream);
    JobClient jobClient = submitJob(execEnv, "Source Single Split Test");

    // Step 4: Validate test data (was mislabeled "Step 5"; try-with-resources also closes
    // the collect iterator and releases the collect sink's resources)
    try (CollectResultIterator<T> resultIterator = iteratorBuilder.build(jobClient)) {
        // Check test result
        LOG.info("Checking test results");
        checkResultWithSemantic(resultIterator, singletonList(testRecords), semantic, null);
    }

    // Step 5: Clean up — a bounded source must drive the job to FINISHED on its own
    waitForJobStatus(jobClient, singletonList(JobStatus.FINISHED));
}
/**
 * Verifies that a bounded source with multiple splits delivers every record of every split.
 *
 * <p>Four splits are created and populated in the external system, then read back by a Flink
 * job running with parallelism 4. Per-split record content and order must match the written
 * data; ordering across splits is not checked.
 *
 * <p>A bounded source is required for this test.
 */
@TestTemplate
@DisplayName("Test source with multiple splits")
public void testMultipleSplits(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic)
        throws Exception {
    // Configure a bounded source with the delivery guarantee under test.
    TestingSourceSettings sourceOptions =
            TestingSourceSettings.builder()
                    .setBoundedness(Boundedness.BOUNDED)
                    .setCheckpointingMode(semantic)
                    .build();
    TestEnvironmentSettings environmentSettings =
            TestEnvironmentSettings.builder()
                    .setConnectorJarPaths(externalContext.getConnectorJarPaths())
                    .build();
    Source<T, ?, ?> testedSource = tryCreateSource(externalContext, sourceOptions);

    // Populate each split of the external system with generated records.
    final int numSplits = 4;
    List<List<T>> expectedRecords = new ArrayList<>();
    for (int splitIndex = 0; splitIndex < numSplits; splitIndex++) {
        expectedRecords.add(
                generateAndWriteTestData(splitIndex, externalContext, sourceOptions));
    }

    // Run a Flink job reading the source with one subtask per split.
    StreamExecutionEnvironment env = testEnv.createExecutionEnvironment(environmentSettings);
    DataStreamSource<T> sourceStream =
            env.fromSource(testedSource, WatermarkStrategy.noWatermarks(), "Tested Source")
                    .setParallelism(numSplits);
    CollectIteratorBuilder<T> collector = addCollectSink(sourceStream);
    JobClient client = submitJob(env, "Source Multiple Split Test");

    // Compare what the job collected against what was written, split by split.
    try (CloseableIterator<T> collected = collector.build(client)) {
        LOG.info("Checking test results");
        checkResultWithSemantic(collected, expectedRecords, semantic, null);
    }
}
/**
 * Test connector source restart from a savepoint.
 *
 * <p>This test will create 4 splits in the external system first, write test data to all
 * splits, and consume back via a Flink job. Then stop the job with savepoint, restart the job
 * from the checkpoint. After the job has been running, add some extra data to the source and
 * compare the result.
 *
 * <p>The number and order of records in each split consumed by Flink need to be identical to
 * the test data written into the external system to pass this test. There's no requirement for
 * record order across splits.
 */
@TestTemplate
@DisplayName("Test source restarting from a savepoint")
public void testSavepoint(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic)
        throws Exception {
    // 4 splits; parallelism stays at 4 across the restart (no rescale).
    restartFromSavepoint(testEnv, externalContext, semantic, 4, 4, 4);
}
/**
 * Test connector source restart from a savepoint with a higher parallelism.
 *
 * <p>This test will create 4 splits in the external system first, write test data to all splits
 * and consume back via a Flink job with parallelism 2. Then stop the job with savepoint,
 * restart the job from the checkpoint with a higher parallelism 4. After the job has been
 * running, add some extra data to the source and compare the result.
 *
 * <p>The number and order of records in each split consumed by Flink need to be identical to
 * the test data written into the external system to pass this test. There's no requirement for
 * record order across splits.
 */
@TestTemplate
@DisplayName("Test source restarting with a higher parallelism")
public void testScaleUp(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic)
        throws Exception {
    // 4 splits; scale from parallelism 2 up to 4 across the restart.
    restartFromSavepoint(testEnv, externalContext, semantic, 4, 2, 4);
}
/**
 * Test connector source restart from a savepoint with a lower parallelism.
 *
 * <p>This test will create 4 splits in the external system first, write test data to all splits
 * and consume back via a Flink job with parallelism 4. Then stop the job with savepoint,
 * restart the job from the checkpoint with a lower parallelism 2. After the job has been
 * running, add some extra data to the source and compare the result.
 *
 * <p>The number and order of records in each split consumed by Flink need to be identical to
 * the test data written into the external system to pass this test. There's no requirement for
 * record order across splits.
 */
@TestTemplate
@DisplayName("Test source restarting with a lower parallelism")
public void testScaleDown(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic)
        throws Exception {
    // 4 splits; scale from parallelism 4 down to 2 across the restart.
    restartFromSavepoint(testEnv, externalContext, semantic, 4, 4, 2);
}
/**
 * Common driver for the savepoint/rescale tests: run the source, check the first batch of
 * records, stop with a savepoint, write more records, restart from the savepoint (possibly at
 * a different parallelism) and check that the new records arrive exactly once.
 *
 * @param testEnv test environment providing the Flink cluster
 * @param externalContext connector-specific external system access
 * @param semantic delivery guarantee under test
 * @param splitNumber number of splits created in the external system
 * @param beforeParallelism source parallelism of the first job
 * @param afterParallelism source parallelism after restoring from the savepoint
 */
private void restartFromSavepoint(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic,
        final int splitNumber,
        final int beforeParallelism,
        final int afterParallelism)
        throws Exception {
    // Step 1: Preparation — unbounded source, since the job must keep running until stopped.
    TestingSourceSettings sourceSettings =
            TestingSourceSettings.builder()
                    .setBoundedness(Boundedness.CONTINUOUS_UNBOUNDED)
                    .setCheckpointingMode(semantic)
                    .build();
    TestEnvironmentSettings envOptions =
            TestEnvironmentSettings.builder()
                    .setConnectorJarPaths(externalContext.getConnectorJarPaths())
                    .build();

    // Step 2: Generate test data. Writers are kept so the SAME splits can receive more
    // records after the savepoint.
    final List<ExternalSystemSplitDataWriter<T>> writers = new ArrayList<>();
    final List<List<T>> testRecordCollections = new ArrayList<>();
    for (int i = 0; i < splitNumber; i++) {
        writers.add(externalContext.createSourceSplitDataWriter(sourceSettings));
        testRecordCollections.add(
                generateTestDataForWriter(externalContext, sourceSettings, i, writers.get(i)));
    }

    // Step 3: Build and execute Flink job.
    final StreamExecutionEnvironment execEnv = testEnv.createExecutionEnvironment(envOptions);
    // NOTE(review): checkpointing is forced to EXACTLY_ONCE here regardless of `semantic` —
    // presumably required by the checkpointed collect-sink result buffer; confirm.
    execEnv.getCheckpointConfig()
            .setCheckpointingConsistencyMode(CheckpointingMode.EXACTLY_ONCE);
    execEnv.enableCheckpointing(50);
    // No restart strategy: a failure should fail the test rather than be retried.
    RestartStrategyUtils.configureNoRestartStrategy(execEnv);
    DataStreamSource<T> source =
            execEnv.fromSource(
                            tryCreateSource(externalContext, sourceSettings),
                            WatermarkStrategy.noWatermarks(),
                            "Tested Source")
                    .setParallelism(beforeParallelism);
    CollectIteratorBuilder<T> iteratorBuilder = addCollectSink(source);
    final JobClient jobClient = execEnv.executeAsync("Restart Test");

    // Step 4: Check the result and stop Flink job with a savepoint.
    CollectResultIterator<T> iterator = null;
    try {
        iterator = iteratorBuilder.build(jobClient);
        checkResultWithSemantic(
                iterator,
                testRecordCollections,
                semantic,
                getTestDataSize(testRecordCollections));
    } catch (Exception e) {
        // Ensure the async job does not outlive a failed assertion.
        killJob(jobClient);
        throw e;
    }
    String savepointPath =
            jobClient
                    .stopWithSavepoint(
                            false, testEnv.getCheckpointUri(), SavepointFormatType.CANONICAL)
                    .get(30, TimeUnit.SECONDS);
    waitForJobStatus(jobClient, singletonList(JobStatus.FINISHED));

    // Step 5: Generate new test data on the same splits.
    final List<List<T>> newTestRecordCollections = new ArrayList<>();
    for (int i = 0; i < splitNumber; i++) {
        newTestRecordCollections.add(
                generateTestDataForWriter(externalContext, sourceSettings, i, writers.get(i)));
    }

    // Step 6: restart the Flink job with the savepoint.
    TestEnvironmentSettings restartEnvOptions =
            TestEnvironmentSettings.builder()
                    .setConnectorJarPaths(externalContext.getConnectorJarPaths())
                    .setSavepointRestorePath(savepointPath)
                    .build();
    final StreamExecutionEnvironment restartEnv =
            testEnv.createExecutionEnvironment(restartEnvOptions);
    restartEnv.enableCheckpointing(500);
    restartEnv
            .getCheckpointConfig()
            .setCheckpointingConsistencyMode(CheckpointingMode.EXACTLY_ONCE);

    DataStreamSource<T> restartSource =
            restartEnv
                    .fromSource(
                            tryCreateSource(externalContext, sourceSettings),
                            WatermarkStrategy.noWatermarks(),
                            "Tested Source")
                    .setParallelism(afterParallelism);

    addCollectSink(restartSource);
    final JobClient restartJobClient = restartEnv.executeAsync("Restart Test");

    waitForJobStatus(restartJobClient, singletonList(JobStatus.RUNNING));

    try {
        // Point the old iterator at the restarted job.
        iterator.setJobClient(restartJobClient);

        /*
         * Use the same iterator as the previous run, because the CollectStreamSink will snapshot
         * its state and recover from it.
         *
         * The fetcher in CollectResultIterator is responsible for communicating with
         * the CollectSinkFunction, and handles the result with CheckpointedCollectResultBuffer
         * in EXACTLY_ONCE semantic.
         */
        checkResultWithSemantic(
                iterator,
                newTestRecordCollections,
                semantic,
                getTestDataSize(newTestRecordCollections));
    } finally {
        // Clean up
        killJob(restartJobClient);
        iterator.close();
    }
}
/**
 * Test connector source metrics.
 *
 * <p>This test will create 4 splits in the external system first, write test data to all splits
 * and consume back via a Flink job with parallelism 4. Then read and compare the metrics.
 *
 * <p>Now test: numRecordsIn
 */
@TestTemplate
@DisplayName("Test source metrics")
public void testSourceMetrics(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic)
        throws Exception {
    TestingSourceSettings sourceSettings =
            TestingSourceSettings.builder()
                    .setBoundedness(Boundedness.CONTINUOUS_UNBOUNDED)
                    .setCheckpointingMode(semantic)
                    .build();
    TestEnvironmentSettings envOptions =
            TestEnvironmentSettings.builder()
                    .setConnectorJarPaths(externalContext.getConnectorJarPaths())
                    .build();
    final int splitNumber = 4;
    final List<List<T>> testRecordCollections = new ArrayList<>();
    for (int i = 0; i < splitNumber; i++) {
        testRecordCollections.add(generateAndWriteTestData(i, externalContext, sourceSettings));
    }

    // make sure use different names when executes multi times
    String sourceName = "metricTestSource" + testRecordCollections.hashCode();
    final StreamExecutionEnvironment env = testEnv.createExecutionEnvironment(envOptions);
    final DataStreamSource<T> dataStreamSource =
            env.fromSource(
                            tryCreateSource(externalContext, sourceSettings),
                            WatermarkStrategy.noWatermarks(),
                            sourceName)
                    .setParallelism(splitNumber);
    // Records are discarded: this test only inspects the source's metrics, not its output.
    dataStreamSource.sinkTo(new DiscardingSink<>());
    final JobClient jobClient = env.executeAsync("Metrics Test");
    final MetricQuerier queryRestClient = new MetricQuerier(new Configuration());
    final ExecutorService executorService = Executors.newCachedThreadPool();
    try {
        // Metrics are only registered once all tasks are RUNNING.
        waitForAllTaskRunning(
                () ->
                        getJobDetails(
                                new RestClient(new Configuration(), executorService),
                                testEnv.getRestEndpoint(),
                                jobClient.getJobID()));

        // Poll until the numRecordsIn metric reaches the expected count; transient
        // query failures are swallowed and retried by waitUntilCondition.
        waitUntilCondition(
                () -> {
                    // test metrics
                    try {
                        return checkSourceMetrics(
                                queryRestClient,
                                testEnv,
                                jobClient.getJobID(),
                                sourceName,
                                getTestDataSize(testRecordCollections));
                    } catch (Exception e) {
                        // skip failed assert try
                        return false;
                    }
                });
    } finally {
        // Clean up
        executorService.shutdown();
        killJob(jobClient);
    }
}
/**
 * Verifies that a source whose parallelism exceeds the split count still finishes.
 *
 * <p>Four splits are written in the external system and read by a Flink job with parallelism
 * 5, so at least one source reader is assigned no split. Unless the split enumerator signals
 * NoMoreSplitsEvent to the idle reader, the job would hang instead of reaching FINISHED.
 *
 * <p>Per-split record content and order must match the written data; ordering across splits is
 * not checked. A bounded source is required for this test.
 */
@TestTemplate
@DisplayName("Test source with at least one idle parallelism")
public void testIdleReader(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        CheckpointingMode semantic)
        throws Exception {
    // Bounded source with the delivery guarantee under test.
    TestingSourceSettings readerSettings =
            TestingSourceSettings.builder()
                    .setBoundedness(Boundedness.BOUNDED)
                    .setCheckpointingMode(semantic)
                    .build();
    TestEnvironmentSettings clusterSettings =
            TestEnvironmentSettings.builder()
                    .setConnectorJarPaths(externalContext.getConnectorJarPaths())
                    .build();
    Source<T, ?, ?> sourceUnderTest = tryCreateSource(externalContext, readerSettings);

    // Write generated records into every split.
    final int numSplits = 4;
    List<List<T>> writtenRecords = new ArrayList<>();
    for (int split = 0; split < numSplits; split++) {
        writtenRecords.add(generateAndWriteTestData(split, externalContext, readerSettings));
    }

    // One more subtask than splits guarantees at least one idle reader.
    StreamExecutionEnvironment env = testEnv.createExecutionEnvironment(clusterSettings);
    DataStreamSource<T> readStream =
            env.fromSource(sourceUnderTest, WatermarkStrategy.noWatermarks(), "Tested Source")
                    .setParallelism(numSplits + 1);
    CollectIteratorBuilder<T> collector = addCollectSink(readStream);
    JobClient client = submitJob(env, "Idle Reader Test");

    // Validate the collected output against the written data.
    try (CloseableIterator<T> collected = collector.build(client)) {
        LOG.info("Checking test results");
        checkResultWithSemantic(collected, writtenRecords, semantic, null);
    }

    // The job must terminate on its own once all splits are exhausted.
    waitForJobStatus(client, singletonList(JobStatus.FINISHED));
}
/**
 * Test connector source with task manager failover.
 *
 * <p>This test will create 1 split in the external system, write test record set A into the
 * split, restart task manager to trigger job failover, write test record set B into the split,
 * and terminate the Flink job finally.
 *
 * <p>The number and order of records consumed by Flink should be identical to A before the
 * failover and B after the failover in order to pass the test.
 *
 * <p>An unbounded source is required for this test, since TaskManager failover will be
 * triggered in the middle of the test.
 */
@TestTemplate
@DisplayName("Test TaskManager failure")
public void testTaskManagerFailure(
        TestEnvironment testEnv,
        DataStreamSourceExternalContext<T> externalContext,
        ClusterControllable controller,
        CheckpointingMode semantic)
        throws Exception {
    // Step 1: Preparation. CONTINUOUS_UNBOUNDED keeps the job alive across the failover.
    TestingSourceSettings sourceSettings =
            TestingSourceSettings.builder()
                    .setBoundedness(Boundedness.CONTINUOUS_UNBOUNDED)
                    .setCheckpointingMode(semantic)
                    .build();
    TestEnvironmentSettings envOptions =
            TestEnvironmentSettings.builder()
                    .setConnectorJarPaths(externalContext.getConnectorJarPaths())
                    .build();
    Source<T, ?, ?> source = tryCreateSource(externalContext, sourceSettings);
    // Step 2: Write test data (record set A) to the single split of the external system.
    int splitIndex = 0;
    List<T> testRecordsBeforeFailure =
            externalContext.generateTestData(
                    sourceSettings, splitIndex, ThreadLocalRandom.current().nextLong());
    ExternalSystemSplitDataWriter<T> externalSystemSplitDataWriter =
            externalContext.createSourceSplitDataWriter(sourceSettings);
    LOG.info(
            "Writing {} records for split {} to external system",
            testRecordsBeforeFailure.size(),
            splitIndex);
    externalSystemSplitDataWriter.writeRecords(testRecordsBeforeFailure);
    // Step 3: Build and execute Flink job. A short (50 ms) checkpoint interval keeps
    // checkpoints frequent ahead of the induced failover.
    StreamExecutionEnvironment execEnv = testEnv.createExecutionEnvironment(envOptions);
    execEnv.enableCheckpointing(50);
    DataStreamSource<T> stream =
            execEnv.fromSource(source, WatermarkStrategy.noWatermarks(), "Tested Source")
                    .setParallelism(1);
    CollectIteratorBuilder<T> iteratorBuilder = addCollectSink(stream);
    JobClient jobClient = submitJob(execEnv, "TaskManager Failover Test");
    // Step 4: Validate records before killing TaskManagers. The iterator is intentionally
    // NOT in try-with-resources: it must survive the failover and is closed in Step 8.
    CloseableIterator<T> iterator = iteratorBuilder.build(jobClient);
    LOG.info("Checking records before killing TaskManagers");
    checkResultWithSemantic(
            iterator,
            singletonList(testRecordsBeforeFailure),
            semantic,
            testRecordsBeforeFailure.size());
    // Step 5: Trigger TaskManager failover and wait until the job is RUNNING again.
    LOG.info("Trigger TaskManager failover");
    controller.triggerTaskManagerFailover(jobClient, () -> {});
    LOG.info("Waiting for job recovering from failure");
    waitForJobStatus(jobClient, singletonList(JobStatus.RUNNING));
    // Step 6: Write test data again (record set B) to the same split.
    List<T> testRecordsAfterFailure =
            externalContext.generateTestData(
                    sourceSettings, splitIndex, ThreadLocalRandom.current().nextLong());
    LOG.info(
            "Writing {} records for split {} to external system",
            testRecordsAfterFailure.size(),
            splitIndex);
    externalSystemSplitDataWriter.writeRecords(testRecordsAfterFailure);
    // Step 7: Validate that exactly record set B is consumed after the failover.
    LOG.info("Checking records after job failover");
    checkResultWithSemantic(
            iterator,
            singletonList(testRecordsAfterFailure),
            semantic,
            testRecordsAfterFailure.size());
    // Step 8: Clean up — cancel the unbounded job, then close the result iterator.
    terminateJob(jobClient);
    waitForJobStatus(jobClient, singletonList(JobStatus.CANCELED));
    iterator.close();
}
// ----------------------------- Helper Functions ---------------------------------
/**
 * Generates one split's worth of test records and pushes them into the external system.
 *
 * @param splitIndex index of the split to populate
 * @param externalContext External context that generates data and creates split writers
 * @param testingSourceSettings settings used when creating the split data writer
 * @return the records that were written, in write order
 */
protected List<T> generateAndWriteTestData(
        int splitIndex,
        DataStreamSourceExternalContext<T> externalContext,
        TestingSourceSettings testingSourceSettings) {
    final List<T> records =
            externalContext.generateTestData(
                    testingSourceSettings, splitIndex, ThreadLocalRandom.current().nextLong());
    LOG.info(
            "Writing {} records for split {} to external system",
            records.size(),
            splitIndex);
    final ExternalSystemSplitDataWriter<T> writer =
            externalContext.createSourceSplitDataWriter(testingSourceSettings);
    writer.writeRecords(records);
    return records;
}
/** Creates the tested source, aborting (not failing) the test if the options are unsupported. */
protected Source<T, ?, ?> tryCreateSource(
        DataStreamSourceExternalContext<T> externalContext,
        TestingSourceSettings sourceOptions) {
    final Source<T, ?, ?> source;
    try {
        source = externalContext.createSource(sourceOptions);
    } catch (UnsupportedOperationException e) {
        // An unsupported option combination is not a test failure; skip this template instead.
        throw new TestAbortedException("Cannot create source satisfying given options", e);
    }
    return source;
}
/** Submits the job asynchronously to the test environment and returns its client. */
protected JobClient submitJob(StreamExecutionEnvironment env, String jobName) throws Exception {
    LOG.info("Submitting Flink job {} to test environment", jobName);
    final JobClient jobClient = env.executeAsync(jobName);
    return jobClient;
}
/** Attaches a collect sink to the stream and returns a builder for its result iterator. */
protected CollectIteratorBuilder<T> addCollectSink(DataStream<T> stream) {
    final String accumulatorName = "dataStreamCollect_" + UUID.randomUUID();
    final String operatorUid = "dataStreamCollect";
    final TypeSerializer<T> serializer =
            stream.getType()
                    .createSerializer(stream.getExecutionConfig().getSerializerConfig());
    final CollectStreamSink<T> sink =
            new CollectStreamSink<>(
                    stream, new CollectSinkOperatorFactory<>(serializer, accumulatorName));
    sink.name("Data stream collect sink");
    sink.uid(operatorUid);
    stream.getExecutionEnvironment().addOperator(sink.getTransformation());
    return new CollectIteratorBuilder<>(
            operatorUid,
            serializer,
            accumulatorName,
            stream.getExecutionEnvironment().getCheckpointConfig());
}
/**
 * Generates a set of test records for one split and sends them through the supplied writer.
 *
 * @param externalContext External context
 * @param sourceSettings settings of the tested source
 * @param splitIndex the split index
 * @param writer the writer to send data
 * @return List of generated test records
 */
protected List<T> generateTestDataForWriter(
        DataStreamSourceExternalContext<T> externalContext,
        TestingSourceSettings sourceSettings,
        int splitIndex,
        ExternalSystemSplitDataWriter<T> writer) {
    final List<T> records =
            externalContext.generateTestData(
                    sourceSettings, splitIndex, ThreadLocalRandom.current().nextLong());
    LOG.debug("Writing {} records to external system", records.size());
    writer.writeRecords(records);
    return records;
}
/**
 * Get the size of test data.
 *
 * @param collections test data, one list per split
 * @return the total number of test records across all splits
 */
protected int getTestDataSize(List<List<T>> collections) {
    // Idiomatic replacement for the manual accumulation loop: sum per-split record counts.
    return collections.stream().mapToInt(List::size).sum();
}
/**
 * Compare the test data with the result.
 *
 * <p>If the source is bounded, limit should be null.
 *
 * @param resultIterator the data read from the job
 * @param testData the test data
 * @param semantic the supported semantic, see {@link CheckpointingMode}
 * @param limit expected number of the data to read from the job
 */
protected void checkResultWithSemantic(
        CloseableIterator<T> resultIterator,
        List<List<T>> testData,
        CheckpointingMode semantic,
        Integer limit) {
    if (limit == null) {
        // Bounded case: the iterator terminates on its own, so compare everything directly.
        CollectIteratorAssertions.assertThat(resultIterator)
                .matchesRecordsFromSource(testData, semantic);
        return;
    }
    // Limited case: stop after `limit` records; run asynchronously so a stalled
    // source cannot block the check forever.
    final Runnable check =
            () ->
                    CollectIteratorAssertions.assertThat(resultIterator)
                            .withNumRecordsLimit(limit)
                            .matchesRecordsFromSource(testData, semantic);
    assertThatFuture(runAsync(check)).eventuallySucceeds();
}
/** Compares the source's aggregated numRecordsIn metric with the expected record count. */
private boolean checkSourceMetrics(
        MetricQuerier queryRestClient,
        TestEnvironment testEnv,
        JobID jobId,
        String sourceName,
        long allRecordSize)
        throws Exception {
    final Double numRecordsIn =
            queryRestClient.getAggregatedMetricsByRestAPI(
                    testEnv.getRestEndpoint(),
                    jobId,
                    sourceName,
                    MetricNames.IO_NUM_RECORDS_IN,
                    null);
    return Precision.equals(allRecordSize, numRecordsIn);
}
/** Cancels the given job and blocks until it reaches the CANCELED state. */
private void killJob(JobClient jobClient) throws Exception {
    terminateJob(jobClient);
    waitForJobStatus(jobClient, singletonList(JobStatus.CANCELED));
}
/** Builder class for constructing {@link CollectResultIterator} of collect sink. */
protected static class CollectIteratorBuilder<T> {

    // Uid of the collect sink operator, used to locate it in the running job.
    private final String operatorUid;
    // Serializer for the records flowing through the tested stream.
    private final TypeSerializer<T> serializer;
    // Name of the accumulator that transports collected records back to the client.
    private final String accumulatorName;
    private final CheckpointConfig checkpointConfig;

    protected CollectIteratorBuilder(
            String operatorUid,
            TypeSerializer<T> serializer,
            String accumulatorName,
            CheckpointConfig checkpointConfig) {
        this.operatorUid = operatorUid;
        this.serializer = serializer;
        this.accumulatorName = accumulatorName;
        this.checkpointConfig = checkpointConfig;
    }

    /** Creates a result iterator bound to the given job client. */
    protected CollectResultIterator<T> build(JobClient jobClient) {
        CollectResultIterator<T> iterator =
                new CollectResultIterator<>(
                        operatorUid,
                        serializer,
                        accumulatorName,
                        checkpointConfig,
                        RpcOptions.ASK_TIMEOUT_DURATION.defaultValue().toMillis());
        iterator.setJobClient(jobClient);
        return iterator;
    }
}
}
|
googleapis/google-cloud-java | 35,757 | java-devicestreaming/proto-google-cloud-devicestreaming-v1/src/main/java/com/google/cloud/devicestreaming/v1/UpdateDeviceSessionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/devicestreaming/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.devicestreaming.v1;
/**
*
*
* <pre>
* Request message for DirectAccessService.UpdateDeviceSession.
* </pre>
*
* Protobuf type {@code google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest}
*/
public final class UpdateDeviceSessionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest)
UpdateDeviceSessionRequestOrBuilder {
private static final long serialVersionUID = 0L;
  // NOTE(review): protoc-generated code (see file header) — change service.proto and
  // regenerate instead of editing this file by hand.
  // Use UpdateDeviceSessionRequest.newBuilder() to construct.
  private UpdateDeviceSessionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor; both message fields start unset (see bitField0_).
  private UpdateDeviceSessionRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateDeviceSessionRequest();
  }

  /** Returns the protobuf descriptor for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.devicestreaming.v1.ServiceProto
        .internal_static_google_cloud_devicestreaming_v1_UpdateDeviceSessionRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.devicestreaming.v1.ServiceProto
        .internal_static_google_cloud_devicestreaming_v1_UpdateDeviceSessionRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest.class,
            com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest.Builder.class);
  }
  // Field-presence bits: 0x00000001 = deviceSession, 0x00000002 = updateMask.
  private int bitField0_;
  public static final int DEVICE_SESSION_FIELD_NUMBER = 1;
  private com.google.cloud.devicestreaming.v1.DeviceSession deviceSession_;

  /**
   *
   *
   * <pre>
   * Required. DeviceSession to update.
   * The DeviceSession's `name` field is used to identify the session to update
   * "projects/{project_id}/deviceSessions/{session_id}"
   * </pre>
   *
   * <code>
   * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the deviceSession field is set.
   */
  @java.lang.Override
  public boolean hasDeviceSession() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. DeviceSession to update.
   * The DeviceSession's `name` field is used to identify the session to update
   * "projects/{project_id}/deviceSessions/{session_id}"
   * </pre>
   *
   * <code>
   * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The deviceSession. Never null: falls back to the default instance when unset.
   */
  @java.lang.Override
  public com.google.cloud.devicestreaming.v1.DeviceSession getDeviceSession() {
    return deviceSession_ == null
        ? com.google.cloud.devicestreaming.v1.DeviceSession.getDefaultInstance()
        : deviceSession_;
  }

  /**
   *
   *
   * <pre>
   * Required. DeviceSession to update.
   * The DeviceSession's `name` field is used to identify the session to update
   * "projects/{project_id}/deviceSessions/{session_id}"
   * </pre>
   *
   * <code>
   * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.devicestreaming.v1.DeviceSessionOrBuilder getDeviceSessionOrBuilder() {
    return deviceSession_ == null
        ? com.google.cloud.devicestreaming.v1.DeviceSession.getDefaultInstance()
        : deviceSession_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   *
   *
   * <pre>
   * Optional. The list of fields to update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Optional. The list of fields to update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask. Never null: falls back to the default instance when unset.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   *
   *
   * <pre>
   * Optional. The list of fields to update.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  // Cached initialization state: -1 unknown, 0 not initialized, 1 initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message declares no proto2-required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Emit only fields whose presence bit is set, in ascending field-number order.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getDeviceSession());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size; // cached from a previous call

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDeviceSession());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest other =
        (com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest) obj;

    // Two messages are equal only if field presence AND field values match.
    if (hasDeviceSession() != other.hasDeviceSession()) return false;
    if (hasDeviceSession()) {
      if (!getDeviceSession().equals(other.getDeviceSession())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode; // hash is cached after the first computation
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasDeviceSession()) {
      hash = (37 * hash) + DEVICE_SESSION_FIELD_NUMBER;
      hash = (53 * hash) + getDeviceSession().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points; all delegate to PARSER / GeneratedMessageV3 helpers.
  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance maps to a fresh builder; any other instance seeds the builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for DirectAccessService.UpdateDeviceSession.
* </pre>
*
* Protobuf type {@code google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest)
com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.devicestreaming.v1.ServiceProto
          .internal_static_google_cloud_devicestreaming_v1_UpdateDeviceSessionRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.devicestreaming.v1.ServiceProto
          .internal_static_google_cloud_devicestreaming_v1_UpdateDeviceSessionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest.class,
              com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest.Builder.class);
    }

    // Construct using com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates the nested field builders when the runtime always uses field builders.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getDeviceSessionFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset presence bits and drop both fields plus their nested builders.
      bitField0_ = 0;
      deviceSession_ = null;
      if (deviceSessionBuilder_ != null) {
        deviceSessionBuilder_.dispose();
        deviceSessionBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.devicestreaming.v1.ServiceProto
          .internal_static_google_cloud_devicestreaming_v1_UpdateDeviceSessionRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest
        getDefaultInstanceForType() {
      return com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest build() {
      com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest buildPartial() {
      com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest result =
          new com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each field whose presence bit is set from the builder into the message.
    private void buildPartial0(
        com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.deviceSession_ =
            deviceSessionBuilder_ == null ? deviceSession_ : deviceSessionBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the typed merge when possible; fall back to the reflective merge otherwise.
      if (other instanceof com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest) {
        return mergeFrom((com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest other) {
      if (other
          == com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest.getDefaultInstance())
        return this;
      if (other.hasDeviceSession()) {
        mergeDeviceSession(other.getDeviceSession());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                input.readMessage(getDeviceSessionFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Field-presence bits mirroring the message: 0x00000001 = deviceSession, 0x00000002 = updateMask.
    private int bitField0_;

    private com.google.cloud.devicestreaming.v1.DeviceSession deviceSession_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.devicestreaming.v1.DeviceSession,
            com.google.cloud.devicestreaming.v1.DeviceSession.Builder,
            com.google.cloud.devicestreaming.v1.DeviceSessionOrBuilder>
        deviceSessionBuilder_;

    /**
     *
     *
     * <pre>
     * Required. DeviceSession to update.
     * The DeviceSession's `name` field is used to identify the session to update
     * "projects/{project_id}/deviceSessions/{session_id}"
     * </pre>
     *
     * <code>
     * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the deviceSession field is set.
     */
    public boolean hasDeviceSession() {
      return ((bitField0_ & 0x00000001) != 0);
    }

    /**
     *
     *
     * <pre>
     * Required. DeviceSession to update.
     * The DeviceSession's `name` field is used to identify the session to update
     * "projects/{project_id}/deviceSessions/{session_id}"
     * </pre>
     *
     * <code>
     * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The deviceSession. Never null: falls back to the default instance when unset.
     */
    public com.google.cloud.devicestreaming.v1.DeviceSession getDeviceSession() {
      if (deviceSessionBuilder_ == null) {
        return deviceSession_ == null
            ? com.google.cloud.devicestreaming.v1.DeviceSession.getDefaultInstance()
            : deviceSession_;
      } else {
        return deviceSessionBuilder_.getMessage();
      }
    }

    /**
     *
     *
     * <pre>
     * Required. DeviceSession to update.
     * The DeviceSession's `name` field is used to identify the session to update
     * "projects/{project_id}/deviceSessions/{session_id}"
     * </pre>
     *
     * <code>
     * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDeviceSession(com.google.cloud.devicestreaming.v1.DeviceSession value) {
      if (deviceSessionBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        deviceSession_ = value;
      } else {
        deviceSessionBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. DeviceSession to update.
     * The DeviceSession's `name` field is used to identify the session to update
     * "projects/{project_id}/deviceSessions/{session_id}"
     * </pre>
     *
     * <code>
     * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDeviceSession(
        com.google.cloud.devicestreaming.v1.DeviceSession.Builder builderForValue) {
      if (deviceSessionBuilder_ == null) {
        deviceSession_ = builderForValue.build();
      } else {
        deviceSessionBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. DeviceSession to update.
     * The DeviceSession's `name` field is used to identify the session to update
     * "projects/{project_id}/deviceSessions/{session_id}"
     * </pre>
     *
     * <code>
     * .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeDeviceSession(com.google.cloud.devicestreaming.v1.DeviceSession value) {
      if (deviceSessionBuilder_ == null) {
        // Merge into the existing value only when one is already set and non-default;
        // otherwise replace outright.
        if (((bitField0_ & 0x00000001) != 0)
            && deviceSession_ != null
            && deviceSession_
                != com.google.cloud.devicestreaming.v1.DeviceSession.getDefaultInstance()) {
          getDeviceSessionBuilder().mergeFrom(value);
        } else {
          deviceSession_ = value;
        }
      } else {
        deviceSessionBuilder_.mergeFrom(value);
      }
      if (deviceSession_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. DeviceSession to update.
* The DeviceSession's `name` field is used to identify the session to update
* "projects/{project_id}/deviceSessions/{session_id}"
* </pre>
*
* <code>
* .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDeviceSession() {
bitField0_ = (bitField0_ & ~0x00000001);
deviceSession_ = null;
if (deviceSessionBuilder_ != null) {
deviceSessionBuilder_.dispose();
deviceSessionBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. DeviceSession to update.
* The DeviceSession's `name` field is used to identify the session to update
* "projects/{project_id}/deviceSessions/{session_id}"
* </pre>
*
* <code>
* .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.devicestreaming.v1.DeviceSession.Builder getDeviceSessionBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getDeviceSessionFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. DeviceSession to update.
* The DeviceSession's `name` field is used to identify the session to update
* "projects/{project_id}/deviceSessions/{session_id}"
* </pre>
*
* <code>
* .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public com.google.cloud.devicestreaming.v1.DeviceSessionOrBuilder getDeviceSessionOrBuilder() {
      // Generated code: read-only view that never forces creation of a sub-builder
      // and never returns null (falls back to the default instance).
      if (deviceSessionBuilder_ != null) {
        return deviceSessionBuilder_.getMessageOrBuilder();
      } else {
        return deviceSession_ == null
            ? com.google.cloud.devicestreaming.v1.DeviceSession.getDefaultInstance()
            : deviceSession_;
      }
    }
/**
*
*
* <pre>
* Required. DeviceSession to update.
* The DeviceSession's `name` field is used to identify the session to update
* "projects/{project_id}/deviceSessions/{session_id}"
* </pre>
*
* <code>
* .google.cloud.devicestreaming.v1.DeviceSession device_session = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.devicestreaming.v1.DeviceSession,
            com.google.cloud.devicestreaming.v1.DeviceSession.Builder,
            com.google.cloud.devicestreaming.v1.DeviceSessionOrBuilder>
        getDeviceSessionFieldBuilder() {
      // Generated code: lazily create the sub-builder; once created it owns the
      // message, so the plain field is nulled out.
      if (deviceSessionBuilder_ == null) {
        deviceSessionBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.devicestreaming.v1.DeviceSession,
                com.google.cloud.devicestreaming.v1.DeviceSession.Builder,
                com.google.cloud.devicestreaming.v1.DeviceSessionOrBuilder>(
                getDeviceSession(), getParentForChildren(), isClean());
        deviceSession_ = null;
      }
      return deviceSessionBuilder_;
    }
    // Backing message for update_mask; null when unset or when the sub-builder owns it.
    private com.google.protobuf.FieldMask updateMask_;
    // Lazily-created sub-builder for update_mask (see getUpdateMaskFieldBuilder()).
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the updateMask field is set.
*/
    public boolean hasUpdateMask() {
      // Generated code: bit 0x00000002 of bitField0_ tracks presence of update_mask.
      return ((bitField0_ & 0x00000002) != 0);
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The updateMask.
*/
    public com.google.protobuf.FieldMask getUpdateMask() {
      // Generated code: never returns null; falls back to the default instance.
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      // Generated code: singular message setter; null is rejected explicitly.
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;  // set the has-bit for update_mask
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      // Generated code: convenience overload that builds the message immediately.
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      // Generated code: merge semantics for a singular message field (mirrors
      // mergeDeviceSession above).
      if (updateMaskBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public Builder clearUpdateMask() {
      // Generated code: reset update_mask to its unset state.
      bitField0_ = (bitField0_ & ~0x00000002);  // clear the has-bit
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      // Generated code: marks the field as set because the caller may mutate it
      // through the returned sub-builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      // Generated code: read-only view; never null, never forces a sub-builder.
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
/**
*
*
* <pre>
* Optional. The list of fields to update.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Generated code: lazily create the sub-builder; once created it owns the
      // message, so the plain field is nulled out.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      // Generated code: unknown fields are handled entirely by the superclass.
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest)
  // Generated code: process-wide singleton default instance, created eagerly at
  // class-initialization time.
  private static final com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest();
  }
  public static com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Generated code: stateless parser shared by all parse entry points. All parse
  // failures surface as InvalidProtocolBufferException carrying the partially
  // built message via setUnfinishedMessage().
  private static final com.google.protobuf.Parser<UpdateDeviceSessionRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateDeviceSessionRequest>() {
        @java.lang.Override
        public UpdateDeviceSessionRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // wrap plain I/O failures so callers only ever see protobuf exceptions
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<UpdateDeviceSessionRequest> parser() {
    // Generated code: static accessor for the shared parser.
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<UpdateDeviceSessionRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.devicestreaming.v1.UpdateDeviceSessionRequest
      getDefaultInstanceForType() {
    // Generated code: instance-level accessor mirroring getDefaultInstance().
    return DEFAULT_INSTANCE;
  }
}
|
apache/openjpa | 35,798 | openjpa-lib/src/main/java/org/apache/openjpa/lib/util/collections/AbstractReferenceMap.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openjpa.lib.util.collections;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.ref.Reference;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
/**
* An abstract implementation of a hash-based map that allows the entries to
* be removed by the garbage collector.
* <p>
* This class implements all the features necessary for a subclass reference
* hash-based map. Key-value entries are stored in instances of the
* <code>ReferenceEntry</code> class which can be overridden and replaced.
* The iterators can similarly be replaced, without the need to replace the KeySet,
* EntrySet and Values view classes.
* </p>
* <p>
* Overridable methods are provided to change the default hashing behaviour, and
* to change how entries are added to and removed from the map. Hopefully, all you
* need for unusual subclasses is here.
* </p>
* <p>
* When you construct an <code>AbstractReferenceMap</code>, you can specify what
* kind of references are used to store the map's keys and values.
* If non-hard references are used, then the garbage collector can remove
* mappings if a key or value becomes unreachable, or if the JVM's memory is
* running low. For information on how the different reference types behave,
* see {@link Reference}.
* </p>
* <p>
* Different types of references can be specified for keys and values.
* The keys can be configured to be weak but the values hard,
* in which case this class will behave like a
 * {@link java.util.WeakHashMap}. However, you can also specify hard keys and
* weak values, or any other combination. The default constructor uses
* hard keys and soft values, providing a memory-sensitive cache.
* </p>
* <p>
* This {@link Map} implementation does <i>not</i> allow null elements.
* Attempting to add a null key or value to the map will raise a
* <code>NullPointerException</code>.
* </p>
* <p>
* All the available iterators can be reset back to the start by casting to
* <code>ResettableIterator</code> and calling <code>reset()</code>.
* </p>
* <p>
* This implementation is not synchronized.
* You can use {@link java.util.Collections#synchronizedMap} to
* provide synchronized access to a <code>ReferenceMap</code>.
* </p>
*
* @param <K> the type of the keys in this map
* @param <V> the type of the values in this map
*
* @see Reference
* @since 3.1 (extracted from ReferenceMap in 3.0)
*/
public abstract class AbstractReferenceMap<K, V> extends AbstractHashedMap<K, V> {
/**
* Reference type enum.
*/
public enum ReferenceStrength {
HARD(0), SOFT(1), WEAK(2);
/** value */
public final int value;
/**
* Resolve enum from int.
* @param value the int value
* @return ReferenceType
* @throws IllegalArgumentException if the specified value is invalid.
*/
public static ReferenceStrength resolve(final int value) {
switch (value) {
case 0:
return HARD;
case 1:
return SOFT;
case 2:
return WEAK;
default:
throw new IllegalArgumentException();
}
}
ReferenceStrength(final int value) {
this.value = value;
}
}
    /**
     * The reference type for keys.
     * Not final: the no-arg constructor used during deserialization leaves it
     * to be populated when the serialized state is read back.
     */
    private ReferenceStrength keyType;
    /**
     * The reference type for values.
     * Not final for the same deserialization reason as {@code keyType}.
     */
    private ReferenceStrength valueType;
    /**
     * Should the value be automatically purged when the associated key has been collected?
     */
    private boolean purgeValues;
    /**
     * ReferenceQueue used to eliminate stale mappings.
     * See purge.
     * Transient: recreated by init() on construction, cloning and deserialization.
     */
    private transient ReferenceQueue<Object> queue;
//-----------------------------------------------------------------------
    /**
     * Constructor used during deserialization.
     * Leaves keyType/valueType/purgeValues unset; they are populated when the
     * serialized state is read back.
     */
    protected AbstractReferenceMap() {
        super();
    }
    /**
     * Constructs a new empty map with the specified reference types,
     * load factor and initial capacity.
     *
     * @param keyType the type of reference to use for keys;
     * must be {@link ReferenceStrength#HARD HARD},
     * {@link ReferenceStrength#SOFT SOFT},
     * {@link ReferenceStrength#WEAK WEAK}
     * @param valueType the type of reference to use for values;
     * must be {@link ReferenceStrength#HARD},
     * {@link ReferenceStrength#SOFT SOFT},
     * {@link ReferenceStrength#WEAK WEAK}
     * @param capacity the initial capacity for the map
     * @param loadFactor the load factor for the map
     * @param purgeValues should the value be automatically purged when the
     * key is garbage collected
     */
    protected AbstractReferenceMap(
            final ReferenceStrength keyType, final ReferenceStrength valueType, final int capacity,
            final float loadFactor, final boolean purgeValues) {
        super(capacity, loadFactor);
        this.keyType = keyType;
        this.valueType = valueType;
        this.purgeValues = purgeValues;
    }
    /**
     * Initialise this subclass during construction, cloning or deserialization.
     * Creates the reference queue onto which cleared soft/weak references are
     * enqueued; the queue is drained by {@link #purge()}.
     */
    @Override
    protected void init() {
        queue = new ReferenceQueue<>();
    }
//-----------------------------------------------------------------------
    /**
     * Gets the size of the map.
     *
     * @return the size
     */
    @Override
    public int size() {
        purgeBeforeRead(); // drop GC-cleared mappings first so the count is accurate
        return super.size();
    }
    /**
     * Checks whether the map is currently empty.
     *
     * @return true if the map is currently size zero
     */
    @Override
    public boolean isEmpty() {
        purgeBeforeRead(); // drop GC-cleared mappings first
        return super.isEmpty();
    }
/**
* Checks whether the map contains the specified key.
*
* @param key the key to search for
* @return true if the map contains the key
*/
@Override
public boolean containsKey(final Object key) {
purgeBeforeRead();
final Entry<K, V> entry = getEntry(key);
if (entry == null) {
return false;
}
return entry.getValue() != null;
}
/**
* Checks whether the map contains the specified value.
*
* @param value the value to search for
* @return true if the map contains the value
*/
@Override
public boolean containsValue(final Object value) {
purgeBeforeRead();
if (value == null) {
return false;
}
return super.containsValue(value);
}
/**
* Gets the value mapped to the key specified.
*
* @param key the key
* @return the mapped value, null if no match
*/
@Override
public V get(final Object key) {
purgeBeforeRead();
final Entry<K, V> entry = getEntry(key);
if (entry == null) {
return null;
}
return entry.getValue();
}
/**
* Puts a key-value mapping into this map.
* Neither the key nor the value may be null.
*
* @param key the key to add, must not be null
* @param value the value to add, must not be null
* @return the value previously mapped to this key, null if none
* @throws NullPointerException if either the key or value is null
*/
@Override
public V put(final K key, final V value) {
if (key == null) {
throw new NullPointerException("null keys not allowed");
}
if (value == null) {
throw new NullPointerException("null values not allowed");
}
purgeBeforeWrite();
return super.put(key, value);
}
/**
* Removes the specified mapping from this map.
*
* @param key the mapping to remove
* @return the value mapped to the removed key, null if key not in map
*/
@Override
public V remove(final Object key) {
if (key == null) {
return null;
}
purgeBeforeWrite();
return super.remove(key);
}
/**
* Clears this map.
*/
@Override
public void clear() {
super.clear();
// drain the queue
while (queue.poll() != null) {
// empty
}
}
//-----------------------------------------------------------------------
/**
* Gets a MapIterator over the reference map.
* The iterator only returns valid key/value pairs.
*
* @return a map iterator
*/
@Override
public MapIterator<K, V> mapIterator() {
return new ReferenceMapIterator<>(this);
}
/**
* Returns a set view of this map's entries.
* An iterator returned entry is valid until <code>next()</code> is called again.
* The <code>setValue()</code> method on the <code>toArray</code> entries has no effect.
*
* @return a set view of this map's entries
*/
@Override
public Set<Entry<K, V>> entrySet() {
if (entrySet == null) {
entrySet = new ReferenceEntrySet<>(this);
}
return entrySet;
}
/**
* Returns a set view of this map's keys.
*
* @return a set view of this map's keys
*/
@Override
public Set<K> keySet() {
if (keySet == null) {
keySet = new ReferenceKeySet<>(this);
}
return keySet;
}
/**
* Returns a collection view of this map's values.
*
* @return a set view of this map's values
*/
@Override
public Collection<V> values() {
if (values == null) {
values = new ReferenceValues<>(this);
}
return values;
}
//-----------------------------------------------------------------------
    /**
     * Purges stale mappings from this map before read operations.
     * <p>
     * This implementation calls {@link #purge()} to maintain a consistent state.
     * Protected hook: subclasses may override to tune how aggressively
     * purging happens on reads.
     */
    protected void purgeBeforeRead() {
        purge();
    }
    /**
     * Purges stale mappings from this map before write operations.
     * <p>
     * This implementation calls {@link #purge()} to maintain a consistent state.
     * Protected hook: subclasses may override to tune how aggressively
     * purging happens on writes.
     */
    protected void purgeBeforeWrite() {
        purge();
    }
/**
* Purges stale mappings from this map.
* <p>
* Note that this method is not synchronized! Special
* care must be taken if, for instance, you want stale
* mappings to be removed on a periodic basis by some
* background thread.
*/
protected void purge() {
Reference<?> ref = queue.poll();
while (ref != null) {
purge(ref);
ref = queue.poll();
}
}
    /**
     * Purges the specified reference.
     * Locates the bucket owning the reference (via the key hash stored on the
     * reference), unlinks the matching entry and shrinks the map.
     *
     * @param ref the reference to purge
     */
    protected void purge(final Reference<?> ref) {
        // The hashCode of the reference is the hashCode of the
        // mapping key, even if the reference refers to the
        // mapping value...
        final int hash = ref.hashCode();
        final int index = hashIndex(hash, data.length);
        // walk the bucket's linked list looking for the entry that owns this reference
        HashEntry<K, V> previous = null;
        HashEntry<K, V> entry = data[index];
        while (entry != null) {
            ReferenceEntry<K, V> refEntry = (ReferenceEntry<K, V>) entry;
            if (refEntry.purge(ref)) {
                // unlink the stale entry from the bucket chain
                if (previous == null) {
                    data[index] = entry.next;
                } else {
                    previous.next = entry.next;
                }
                this.size--;
                refEntry.onPurge();
                // a Reference belongs to at most one entry, so we can stop here
                return;
            }
            previous = entry;
            entry = entry.next;
        }
    }
//-----------------------------------------------------------------------
/**
* Gets the entry mapped to the key specified.
*
* @param key the key
* @return the entry, null if no match
*/
@Override
protected HashEntry<K, V> getEntry(final Object key) {
if (key == null) {
return null;
}
return super.getEntry(key);
}
/**
* Gets the hash code for a MapEntry.
* Subclasses can override this, for example to use the identityHashCode.
*
* @param key the key to get a hash code for, may be null
* @param value the value to get a hash code for, may be null
* @return the hash code, as per the MapEntry specification
*/
protected int hashEntry(final Object key, final Object value) {
return (key == null ? 0 : key.hashCode()) ^
(value == null ? 0 : value.hashCode());
}
    /**
     * Compares two keys, in internal converted form, to see if they are equal.
     * <p>
     * This implementation converts the key from the entry to a real reference
     * before comparison.
     *
     * @param key1 the first key to compare passed in from outside
     * @param key2 the second key extracted from the entry via <code>entry.key</code>
     * @return true if equal
     */
    @Override
    @SuppressWarnings("unchecked")
    protected boolean isEqualKey(final Object key1, Object key2) {
        // unwrap key2 unless keys are held hard; the unwrapped key may be null
        // if it was garbage collected, in which case equals() below returns false
        key2 = keyType == ReferenceStrength.HARD ? key2 : ((Reference<K>) key2).get();
        return key1 == key2 || key1.equals(key2);
    }
    /**
     * Creates a ReferenceEntry instead of a HashEntry.
     *
     * @param next the next entry in sequence
     * @param hashCode the hash code to use
     * @param key the key to store
     * @param value the value to store
     * @return the newly created entry
     */
    @Override
    protected ReferenceEntry<K, V> createEntry(final HashEntry<K, V> next, final int hashCode,
            final K key, final V value) {
        return new ReferenceEntry<>(this, next, hashCode, key, value);
    }
    /**
     * Creates an entry set iterator.
     *
     * @return the entrySet iterator
     */
    @Override
    protected Iterator<Entry<K, V>> createEntrySetIterator() {
        return new ReferenceEntrySetIterator<>(this);
    }
    /**
     * Creates a key set iterator.
     *
     * @return the keySet iterator
     */
    @Override
    protected Iterator<K> createKeySetIterator() {
        return new ReferenceKeySetIterator<>(this);
    }
    /**
     * Creates a values iterator.
     *
     * @return the values iterator
     */
    @Override
    protected Iterator<V> createValuesIterator() {
        return new ReferenceValuesIterator<>(this);
    }
//-----------------------------------------------------------------------
/**
* EntrySet implementation.
*/
static class ReferenceEntrySet<K, V> extends EntrySet<K, V> {
protected ReferenceEntrySet(final AbstractHashedMap<K, V> parent) {
super(parent);
}
@Override
public Object[] toArray() {
return toArray(new Object[size()]);
}
@Override
public <T> T[] toArray(final T[] arr) {
// special implementation to handle disappearing entries
final ArrayList<Entry<K, V>> list = new ArrayList<>(size());
for (final Entry<K, V> entry : this) {
list.add(new DefaultMapEntry<>(entry));
}
return list.toArray(arr);
}
}
//-----------------------------------------------------------------------
/**
* KeySet implementation.
*/
static class ReferenceKeySet<K> extends KeySet<K> {
protected ReferenceKeySet(final AbstractHashedMap<K, ?> parent) {
super(parent);
}
@Override
public Object[] toArray() {
return toArray(new Object[size()]);
}
@Override
public <T> T[] toArray(final T[] arr) {
// special implementation to handle disappearing keys
final List<K> list = new ArrayList<>(size());
for (final K key : this) {
list.add(key);
}
return list.toArray(arr);
}
}
//-----------------------------------------------------------------------
/**
* Values implementation.
*/
static class ReferenceValues<V> extends Values<V> {
protected ReferenceValues(final AbstractHashedMap<?, V> parent) {
super(parent);
}
@Override
public Object[] toArray() {
return toArray(new Object[size()]);
}
@Override
public <T> T[] toArray(final T[] arr) {
// special implementation to handle disappearing values
final List<V> list = new ArrayList<>(size());
for (final V value : this) {
list.add(value);
}
return list.toArray(arr);
}
}
//-----------------------------------------------------------------------
    /**
     * A MapEntry implementation for the map.
     * <p>
     * If getKey() or getValue() returns null, it means
     * the mapping is stale and should be removed.
     *
     * @since 3.1
     */
    protected static class ReferenceEntry<K, V> extends HashEntry<K, V> {
        /** The parent map */
        private final AbstractReferenceMap<K, V> parent;
        /**
         * Creates a new entry object for the ReferenceMap.
         *
         * @param parent the parent map
         * @param next the next entry in the hash bucket
         * @param hashCode the hash code of the key
         * @param key the key
         * @param value the value
         */
        public ReferenceEntry(final AbstractReferenceMap<K, V> parent, final HashEntry<K, V> next,
                final int hashCode, final K key, final V value) {
            super(next, hashCode, null, null);
            this.parent = parent;
            // store key and value wrapped per the parent's configured reference strengths
            this.key = toReference(parent.keyType, key, hashCode);
            this.value = toReference(parent.valueType, value, hashCode); // the key hashCode is passed in deliberately
        }
        /**
         * Gets the key from the entry.
         * This method dereferences weak and soft keys and thus may return null.
         *
         * @return the key, which may be null if it was garbage collected
         */
        @Override
        @SuppressWarnings("unchecked")
        public K getKey() {
            return (K) (parent.keyType == ReferenceStrength.HARD ? key : ((Reference<K>) key).get());
        }
        /**
         * Gets the value from the entry.
         * This method dereferences weak and soft value and thus may return null.
         *
         * @return the value, which may be null if it was garbage collected
         */
        @Override
        @SuppressWarnings("unchecked")
        public V getValue() {
            return (V) (parent.valueType == ReferenceStrength.HARD ? value : ((Reference<V>) value).get());
        }
        /**
         * Sets the value of the entry.
         *
         * @param obj the object to store
         * @return the previous value
         */
        @Override
        @SuppressWarnings("unchecked")
        public V setValue(final V obj) {
            final V old = getValue();
            if (parent.valueType != ReferenceStrength.HARD) {
                // clear the old reference so it is not later dequeued and purged
                ((Reference<V>) value).clear();
            }
            value = toReference(parent.valueType, obj, hashCode);
            return old;
        }
        /**
         * Compares this map entry to another.
         * <p>
         * This implementation uses <code>isEqualKey</code> and
         * <code>isEqualValue</code> on the main map for comparison.
         *
         * @param obj the other map entry to compare to
         * @return true if equal, false if not
         */
        @Override
        public boolean equals(final Object obj) {
            if (obj == this) {
                return true;
            }
            if (!(obj instanceof Map.Entry)) {
                return false;
            }
            final Entry<?, ?> entry = (Entry<?, ?>)obj;
            final Object entryKey = entry.getKey(); // convert to hard reference
            final Object entryValue = entry.getValue(); // convert to hard reference
            if (entryKey == null || entryValue == null) {
                return false;
            }
            // compare using map methods, aiding identity subclass
            // note that key is direct access and value is via method
            return parent.isEqualKey(entryKey, key) &&
                   parent.isEqualValue(entryValue, getValue());
        }
        /**
         * Gets the hashcode of the entry using temporary hard references.
         * <p>
         * This implementation uses <code>hashEntry</code> on the main map.
         *
         * @return the hashcode of the entry
         */
        @Override
        public int hashCode() {
            return parent.hashEntry(getKey(), getValue());
        }
        /**
         * Constructs a reference of the given type to the given referent.
         * The reference is registered with the queue for later purging.
         *
         * @param <T> the type of the referenced object
         * @param type HARD, SOFT or WEAK
         * @param referent the object to refer to
         * @param hash the hash code of the <i>key</i> of the mapping;
         * this number might be different from referent.hashCode() if
         * the referent represents a value and not a key
         * @return the reference to the object
         */
        protected <T> Object toReference(final ReferenceStrength type, final T referent, final int hash) {
            if (type == ReferenceStrength.HARD) {
                return referent;
            }
            if (type == ReferenceStrength.SOFT) {
                return new SoftRef<>(hash, referent, parent.queue);
            }
            if (type == ReferenceStrength.WEAK) {
                return new WeakRef<>(hash, referent, parent.queue);
            }
            // unreachable: every ReferenceStrength constant is handled above
            throw new Error();
        }
        /**
         * This is the callback for custom "after purge" logic
         */
        protected void onPurge() {
            // empty
        }
        /**
         * Purges the specified reference.
         *
         * @param ref the reference to purge
         * @return true if the reference belongs to this entry (is its key or
         * value reference, by identity), in which case the entry's references
         * are cleared as a side effect
         */
        protected boolean purge(final Reference<?> ref) {
            // identity comparison: a Reference object belongs to exactly one entry
            boolean r = parent.keyType != ReferenceStrength.HARD && key == ref;
            r = r || parent.valueType != ReferenceStrength.HARD && value == ref;
            if (r) {
                if (parent.keyType != ReferenceStrength.HARD) {
                    ((Reference<?>) key).clear();
                }
                if (parent.valueType != ReferenceStrength.HARD) {
                    ((Reference<?>) value).clear();
                } else if (parent.purgeValues) {
                    // value is held hard: drop it explicitly so it can be collected
                    nullValue();
                }
            }
            return r;
        }
        /**
         * Gets the next entry in the bucket.
         *
         * @return the next entry in the bucket
         */
        protected ReferenceEntry<K, V> next() {
            return (ReferenceEntry<K, V>) next;
        }
        /**
         * This method can be overriden to provide custom logic to purge value
         */
        protected void nullValue() {
            value = null;
        }
    }
//-----------------------------------------------------------------------
    /**
     * Base iterator class.
     * <p>
     * Holds hard references to the current and next key/value so that a
     * mapping reported live by {@code hasNext()} cannot be garbage collected
     * before {@code next()} returns it.
     */
    static class ReferenceBaseIterator<K, V> {
        /** The parent map */
        final AbstractReferenceMap<K, V> parent;
        // These fields keep track of where we are in the table.
        int index;
        ReferenceEntry<K, V> entry;
        ReferenceEntry<K, V> previous;
        // These Object fields provide hard references to the
        // current and next entry; this assures that if hasNext()
        // returns true, next() will actually return a valid element.
        K currentKey, nextKey;
        V currentValue, nextValue;
        // modCount snapshot used for fail-fast ConcurrentModificationException checks
        int expectedModCount;
        public ReferenceBaseIterator(final AbstractReferenceMap<K, V> parent) {
            super();
            this.parent = parent;
            // start just past the last bucket; iteration walks the table backwards
            index = parent.size() != 0 ? parent.data.length : 0;
            // have to do this here! size() invocation above
            // may have altered the modCount.
            expectedModCount = parent.modCount;
        }
        public boolean hasNext() {
            checkMod();
            while (nextNull()) {
                // advance to the next bucket that still contains an entry
                ReferenceEntry<K, V> e = entry;
                int i = index;
                while (e == null && i > 0) {
                    i--;
                    e = (ReferenceEntry<K, V>) parent.data[i];
                }
                entry = e;
                index = i;
                if (e == null) {
                    // table exhausted: no more live mappings
                    currentKey = null;
                    currentValue = null;
                    return false;
                }
                // take hard references; either may already have been collected
                nextKey = e.getKey();
                nextValue = e.getValue();
                if (nextNull()) {
                    // stale entry: skip it and keep scanning
                    entry = entry.next();
                }
            }
            return true;
        }
        private void checkMod() {
            if (parent.modCount != expectedModCount) {
                throw new ConcurrentModificationException();
            }
        }
        // true while no live (key, value) pair has been pinned yet
        private boolean nextNull() {
            return nextKey == null || nextValue == null;
        }
        protected ReferenceEntry<K, V> nextEntry() {
            checkMod();
            if (nextNull() && !hasNext()) {
                throw new NoSuchElementException();
            }
            previous = entry;
            entry = entry.next();
            currentKey = nextKey;
            currentValue = nextValue;
            // release the look-ahead references so hasNext() advances next time
            nextKey = null;
            nextValue = null;
            return previous;
        }
        protected ReferenceEntry<K, V> currentEntry() {
            checkMod();
            return previous;
        }
        public void remove() {
            checkMod();
            if (previous == null) {
                throw new IllegalStateException();
            }
            parent.remove(currentKey);
            previous = null;
            currentKey = null;
            currentValue = null;
            // removal changed the map; resynchronise the fail-fast snapshot
            expectedModCount = parent.modCount;
        }
    }
    /**
     * The EntrySet iterator.
     */
    static class ReferenceEntrySetIterator<K, V>
            extends ReferenceBaseIterator<K, V> implements Iterator<Entry<K, V>> {
        public ReferenceEntrySetIterator(final AbstractReferenceMap<K, V> parent) {
            super(parent);
        }
        @Override
        public Entry<K, V> next() {
            // the base iterator guarantees the returned entry's key/value are live
            return nextEntry();
        }
    }
    /**
     * The keySet iterator.
     */
    static class ReferenceKeySetIterator<K> extends ReferenceBaseIterator<K, Object> implements Iterator<K> {
        @SuppressWarnings("unchecked")
        ReferenceKeySetIterator(final AbstractReferenceMap<K, ?> parent) {
            super((AbstractReferenceMap<K, Object>) parent);
        }
        @Override
        public K next() {
            return nextEntry().getKey();
        }
    }
    /**
     * The values iterator.
     */
    static class ReferenceValuesIterator<V> extends ReferenceBaseIterator<Object, V> implements Iterator<V> {
        @SuppressWarnings("unchecked")
        ReferenceValuesIterator(final AbstractReferenceMap<?, V> parent) {
            super((AbstractReferenceMap<Object, V>) parent);
        }
        @Override
        public V next() {
            return nextEntry().getValue();
        }
    }
/**
* The MapIterator implementation.
*/
static class ReferenceMapIterator<K, V> extends ReferenceBaseIterator<K, V> implements MapIterator<K, V> {
protected ReferenceMapIterator(final AbstractReferenceMap<K, V> parent) {
super(parent);
}
@Override
public K next() {
return nextEntry().getKey();
}
@Override
public K getKey() {
final HashEntry<K, V> current = currentEntry();
if (current == null) {
throw new IllegalStateException(AbstractHashedMap.GETKEY_INVALID);
}
return current.getKey();
}
@Override
public V getValue() {
final HashEntry<K, V> current = currentEntry();
if (current == null) {
throw new IllegalStateException(AbstractHashedMap.GETVALUE_INVALID);
}
return current.getValue();
}
@Override
public V setValue(final V value) {
final HashEntry<K, V> current = currentEntry();
if (current == null) {
throw new IllegalStateException(AbstractHashedMap.SETVALUE_INVALID);
}
return current.setValue(value);
}
}
//-----------------------------------------------------------------------
// These two classes store the hashCode of the key of
// of the mapping, so that after they're dequeued a quick
// lookup of the bucket in the table can occur.
/**
 * A soft reference holder.
 * <p>
 * Stores the hash code of the mapping's key so that, after the referent has been
 * cleared by the garbage collector and this reference dequeued, the owning hash
 * bucket can still be located without access to the (now collected) key object.
 * <p>
 * NOTE(review): {@code hashCode()} is overridden without {@code equals()}, so
 * instances compare by identity — this appears intentional for purge lookups;
 * confirm before changing.
 */
static class SoftRef<T> extends SoftReference<T> {

    /** The hashCode of the key (even if this reference points to a value). */
    private final int hash;

    public SoftRef(final int hash, final T referent, final ReferenceQueue<? super T> queue) {
        super(referent, queue);
        this.hash = hash;
    }

    /** Returns the stored key hash rather than hashing the referent. */
    @Override
    public int hashCode() {
        return hash;
    }
}
/**
 * A weak reference holder.
 * <p>
 * Stores the hash code of the mapping's key so that, after the referent has been
 * cleared by the garbage collector and this reference dequeued, the owning hash
 * bucket can still be located without access to the (now collected) key object.
 * <p>
 * NOTE(review): {@code hashCode()} is overridden without {@code equals()}, so
 * instances compare by identity — this appears intentional for purge lookups;
 * confirm before changing.
 */
static class WeakRef<T> extends WeakReference<T> {

    /** The hashCode of the key (even if this reference points to a value). */
    private final int hash;

    public WeakRef(final int hash, final T referent, final ReferenceQueue<? super T> queue) {
        super(referent, queue);
        this.hash = hash;
    }

    /** Returns the stored key hash rather than hashing the referent. */
    @Override
    public int hashCode() {
        return hash;
    }
}
//-----------------------------------------------------------------------
/**
* Replaces the superclass method to store the state of this class.
* <p>
* Serialization is not one of the JDK's nicest topics. Normal serialization will
* initialise the superclass before the subclass. Sometimes however, this isn't
* what you want, as in this case the <code>put()</code> method on read can be
* affected by subclass state.
* <p>
* The solution adopted here is to serialize the state data of this class in
* this protected method. This method must be called by the
* <code>writeObject()</code> of the first serializable subclass.
* <p>
* Subclasses may override if they have a specific field that must be present
* on read before this implementation will work. Generally, the read determines
* what must be serialized here, if anything.
*
* @param out the output stream
* @throws IOException if an error occurs while writing to the stream
*/
@Override
protected void doWriteObject(final ObjectOutputStream out) throws IOException {
// Stream layout: keyType, valueType, purgeValues, loadFactor, capacity, then
// alternating key/value objects, terminated by a single null key.
// doReadObject() must consume these fields in exactly this order.
out.writeInt(keyType.value);
out.writeInt(valueType.value);
out.writeBoolean(purgeValues);
out.writeFloat(loadFactor);
out.writeInt(data.length);
for (final MapIterator<K, V> it = mapIterator(); it.hasNext();) {
out.writeObject(it.next()); // the key
out.writeObject(it.getValue()); // the value paired with that key
}
out.writeObject(null); // null terminate map
// do not call super.doWriteObject() as code there doesn't work for reference map
}
/**
* Replaces the superclass method to read the state of this class.
* <p>
* Serialization is not one of the JDK's nicest topics. Normal serialization will
* initialise the superclass before the subclass. Sometimes however, this isn't
* what you want, as in this case the <code>put()</code> method on read can be
* affected by subclass state.
* <p>
* The solution adopted here is to deserialize the state data of this class in
* this protected method. This method must be called by the
* <code>readObject()</code> of the first serializable subclass.
* <p>
* Subclasses may override if the subclass has a specific field that must be present
* before <code>put()</code> or <code>calculateThreshold()</code> will work correctly.
*
* @param in the input stream
* @throws IOException if an error occurs while reading from the stream
* @throws ClassNotFoundException if an object read from the stream can not be loaded
*/
@Override
@SuppressWarnings("unchecked")
protected void doReadObject(final ObjectInputStream in) throws IOException, ClassNotFoundException {
// Read back the fields in the exact order written by doWriteObject().
this.keyType = ReferenceStrength.resolve(in.readInt());
this.valueType = ReferenceStrength.resolve(in.readInt());
this.purgeValues = in.readBoolean();
this.loadFactor = in.readFloat();
final int capacity = in.readInt();
init();
data = new HashEntry[capacity];
// COLLECTIONS-599: Calculate threshold before populating, otherwise it will be 0
// when it hits AbstractHashedMap.checkCapacity() and so will unnecessarily
// double up the size of the "data" array during population.
//
// NB: AbstractHashedMap.doReadObject() DOES calculate the threshold before populating.
//
threshold = calculateThreshold(data.length, loadFactor);
// Entries were written as alternating key/value pairs; a null key marks the end.
while (true) {
final K key = (K) in.readObject();
if (key == null) {
break;
}
final V value = (V) in.readObject();
put(key, value);
}
// do not call super.doReadObject() as code there doesn't work for reference map
}
/**
 * Provides protected read-only access to the key reference strength.
 *
 * @param type the strength to compare against
 * @return true if this map's keys are held with the supplied reference strength
 */
protected boolean isKeyType(final ReferenceStrength type) {
    return type == this.keyType;
}
/**
 * Provides protected read-only access to the value reference strength.
 *
 * @param type the strength to compare against
 * @return true if this map's values are held with the supplied reference strength
 */
protected boolean isValueType(final ReferenceStrength type) {
    return type == this.valueType;
}
}
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.sqladmin.model;
/**
* Database instance export context.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud SQL Admin API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class ExportContext extends com.google.api.client.json.GenericJson {
/**
* Options for exporting BAK files (SQL Server-only)
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private BakExportOptions bakExportOptions;
/**
* Options for exporting data as CSV. `MySQL` and `PostgreSQL` instances only.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private CsvExportOptions csvExportOptions;
/**
* Databases to be exported. `MySQL instances:` If `fileType` is `SQL` and no database is
* specified, all databases are exported, except for the `mysql` system database. If `fileType` is
* `CSV`, you can specify one database, either by using this property or by using the
* `csvExportOptions.selectQuery` property, which takes precedence over this property. `PostgreSQL
* instances:` If you don't specify a database by name, all user databases in the instance are
* exported. This excludes system databases and Cloud SQL databases used to manage internal
* operations. Exporting all user databases is only available for directory-formatted parallel
* export. If `fileType` is `CSV`, this database must match the one specified in the
* `csvExportOptions.selectQuery` property. `SQL Server instances:` You must specify one database
* to be exported, and the `fileType` must be `BAK`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> databases;
/**
* The file type for the specified uri.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String fileType;
/**
* This is always `sql#exportContext`.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String kind;
/**
* Whether to perform a serverless export.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.Boolean offload;
/**
* Options for exporting data as SQL statements.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private SqlExportOptions sqlExportOptions;
/**
* Optional. Export parameters specific to SQL Server TDE certificates
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private TdeExportOptions tdeExportOptions;
/**
* The path to the file in Google Cloud Storage where the export will be stored. The URI is in the
* form `gs://bucketName/fileName`. If the file already exists, the request succeeds, but the
* operation fails. If `fileType` is `SQL` and the filename ends with .gz, the contents are
* compressed.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String uri;
/**
 * Returns the BAK export options (SQL Server only).
 *
 * @return the options, or {@code null} if none are set
 */
public BakExportOptions getBakExportOptions() {
    return this.bakExportOptions;
}
/**
 * Sets the BAK export options (SQL Server only).
 *
 * @param bakExportOptions the options, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setBakExportOptions(BakExportOptions bakExportOptions) {
    this.bakExportOptions = bakExportOptions;
    return this;
}
/**
 * Returns the CSV export options (MySQL and PostgreSQL instances only).
 *
 * @return the options, or {@code null} if none are set
 */
public CsvExportOptions getCsvExportOptions() {
    return this.csvExportOptions;
}
/**
 * Sets the CSV export options (MySQL and PostgreSQL instances only).
 *
 * @param csvExportOptions the options, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setCsvExportOptions(CsvExportOptions csvExportOptions) {
    this.csvExportOptions = csvExportOptions;
    return this;
}
/**
 * Returns the list of databases to be exported.
 *
 * <p>MySQL: with {@code fileType=SQL} and no database named, all databases
 * except the {@code mysql} system database are exported; with {@code CSV},
 * at most one database may be named here (the
 * {@code csvExportOptions.selectQuery} property takes precedence).
 * PostgreSQL: omitting the name exports all user databases — excluding system
 * and Cloud SQL internal databases — which is only available for
 * directory-formatted parallel export; with {@code CSV}, the database must
 * match the one in {@code csvExportOptions.selectQuery}. SQL Server: exactly
 * one database is required and {@code fileType} must be {@code BAK}.</p>
 *
 * @return the database names, or {@code null} if none are set
 */
public java.util.List<java.lang.String> getDatabases() {
    return this.databases;
}
/**
 * Sets the list of databases to be exported.
 *
 * <p>MySQL: with {@code fileType=SQL} and no database named, all databases
 * except the {@code mysql} system database are exported; with {@code CSV},
 * at most one database may be named here (the
 * {@code csvExportOptions.selectQuery} property takes precedence).
 * PostgreSQL: omitting the name exports all user databases — excluding system
 * and Cloud SQL internal databases — which is only available for
 * directory-formatted parallel export; with {@code CSV}, the database must
 * match the one in {@code csvExportOptions.selectQuery}. SQL Server: exactly
 * one database is required and {@code fileType} must be {@code BAK}.</p>
 *
 * @param databases the database names, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setDatabases(java.util.List<java.lang.String> databases) {
    this.databases = databases;
    return this;
}
/**
 * Returns the file type for the specified uri.
 *
 * @return the file type, or {@code null} if none is set
 */
public java.lang.String getFileType() {
    return this.fileType;
}
/**
 * Sets the file type for the specified uri.
 *
 * @param fileType the file type, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setFileType(java.lang.String fileType) {
    this.fileType = fileType;
    return this;
}
/**
 * Returns the resource kind; this is always {@code sql#exportContext}.
 *
 * @return the kind string, or {@code null} if none is set
 */
public java.lang.String getKind() {
    return this.kind;
}
/**
 * Sets the resource kind; this is always {@code sql#exportContext}.
 *
 * @param kind the kind string, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
}
/**
 * Returns whether a serverless export should be performed.
 *
 * @return the flag, or {@code null} if none is set
 */
public java.lang.Boolean getOffload() {
    return this.offload;
}
/**
 * Sets whether a serverless export should be performed.
 *
 * @param offload the flag, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setOffload(java.lang.Boolean offload) {
    this.offload = offload;
    return this;
}
/**
 * Returns the options for exporting data as SQL statements.
 *
 * @return the options, or {@code null} if none are set
 */
public SqlExportOptions getSqlExportOptions() {
    return this.sqlExportOptions;
}
/**
 * Sets the options for exporting data as SQL statements.
 *
 * @param sqlExportOptions the options, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setSqlExportOptions(SqlExportOptions sqlExportOptions) {
    this.sqlExportOptions = sqlExportOptions;
    return this;
}
/**
 * Returns the export parameters specific to SQL Server TDE certificates.
 *
 * @return the options, or {@code null} if none are set
 */
public TdeExportOptions getTdeExportOptions() {
    return this.tdeExportOptions;
}
/**
 * Sets the export parameters specific to SQL Server TDE certificates.
 *
 * @param tdeExportOptions the options, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setTdeExportOptions(TdeExportOptions tdeExportOptions) {
    this.tdeExportOptions = tdeExportOptions;
    return this;
}
/**
 * Returns the Google Cloud Storage destination path, in the form
 * {@code gs://bucketName/fileName}. If the file already exists, the request
 * succeeds but the operation fails. With {@code fileType=SQL} and a filename
 * ending in {@code .gz}, the contents are compressed.
 *
 * @return the destination URI, or {@code null} if none is set
 */
public java.lang.String getUri() {
    return this.uri;
}
/**
 * Sets the Google Cloud Storage destination path, in the form
 * {@code gs://bucketName/fileName}. If the file already exists, the request
 * succeeds but the operation fails. With {@code fileType=SQL} and a filename
 * ending in {@code .gz}, the contents are compressed.
 *
 * @param uri the destination URI, or {@code null} for none
 * @return this object, for call chaining
 */
public ExportContext setUri(java.lang.String uri) {
    this.uri = uri;
    return this;
}
/** Stores an arbitrary field by name via {@code GenericJson.set}, returning this object. */
@Override
public ExportContext set(String fieldName, Object value) {
    return (ExportContext) super.set(fieldName, value);
}
/** Creates a copy of this model object via {@code GenericJson.clone}. */
@Override
public ExportContext clone() {
    return (ExportContext) super.clone();
}
/**
 * Options for exporting BAK files (SQL Server-only).
 *
 * <p>Generated JSON model: each field maps to its wire name via
 * {@code @Key}; unset fields are {@code null}.</p>
 */
public static final class BakExportOptions extends com.google.api.client.json.GenericJson {

    /** Type of BAK file to export, FULL or DIFF (SQL Server only). */
    @com.google.api.client.util.Key
    private java.lang.String bakType;

    /** Deprecated: copy_only is deprecated; use differential_base instead. */
    @com.google.api.client.util.Key
    private java.lang.Boolean copyOnly;

    /**
     * Whether the backup may serve as a differential base (a copy_only backup
     * cannot).
     */
    @com.google.api.client.util.Key
    private java.lang.Boolean differentialBase;

    /**
     * Optional. End timestamp (RFC 3339, UTC, e.g.
     * {@code 2023-10-01T16:19:00.094}) bounding the transaction log included in
     * the export; when omitted, all available logs until the current time are
     * included. Cloud SQL for SQL Server only.
     */
    @com.google.api.client.util.Key
    private String exportLogEndTime;

    /**
     * Optional. Begin timestamp (RFC 3339, UTC) bounding the transaction log
     * included in the export; when omitted, all available logs from the start
     * of the retention period are included. Cloud SQL for SQL Server only.
     */
    @com.google.api.client.util.Key
    private String exportLogStartTime;

    /**
     * Number of stripes to use for the export; when blank and {@code striped}
     * is true, the stripe count is chosen automatically.
     */
    @com.google.api.client.util.Key
    private java.lang.Integer stripeCount;

    /** Whether the export should be striped. */
    @com.google.api.client.util.Key
    private java.lang.Boolean striped;

    /** Returns the BAK export type (FULL or DIFF), or {@code null} if unset. */
    public java.lang.String getBakType() {
        return this.bakType;
    }

    /** Sets the BAK export type (FULL or DIFF); returns this object for chaining. */
    public BakExportOptions setBakType(java.lang.String bakType) {
        this.bakType = bakType;
        return this;
    }

    /** Returns the deprecated copy-only flag, or {@code null} if unset. */
    public java.lang.Boolean getCopyOnly() {
        return this.copyOnly;
    }

    /** Sets the deprecated copy-only flag; prefer {@link #setDifferentialBase}. */
    public BakExportOptions setCopyOnly(java.lang.Boolean copyOnly) {
        this.copyOnly = copyOnly;
        return this;
    }

    /** Returns whether the backup can be a differential base, or {@code null} if unset. */
    public java.lang.Boolean getDifferentialBase() {
        return this.differentialBase;
    }

    /** Sets whether the backup can be used as a differential base. */
    public BakExportOptions setDifferentialBase(java.lang.Boolean differentialBase) {
        this.differentialBase = differentialBase;
        return this;
    }

    /** Returns the transaction-log end timestamp, or {@code null} if unset. */
    public String getExportLogEndTime() {
        return this.exportLogEndTime;
    }

    /** Sets the transaction-log end timestamp (RFC 3339, UTC). */
    public BakExportOptions setExportLogEndTime(String exportLogEndTime) {
        this.exportLogEndTime = exportLogEndTime;
        return this;
    }

    /** Returns the transaction-log begin timestamp, or {@code null} if unset. */
    public String getExportLogStartTime() {
        return this.exportLogStartTime;
    }

    /** Sets the transaction-log begin timestamp (RFC 3339, UTC). */
    public BakExportOptions setExportLogStartTime(String exportLogStartTime) {
        this.exportLogStartTime = exportLogStartTime;
        return this;
    }

    /** Returns the stripe count, or {@code null} if unset. */
    public java.lang.Integer getStripeCount() {
        return this.stripeCount;
    }

    /** Sets the number of stripes to use for the export. */
    public BakExportOptions setStripeCount(java.lang.Integer stripeCount) {
        this.stripeCount = stripeCount;
        return this;
    }

    /** Returns whether the export is striped, or {@code null} if unset. */
    public java.lang.Boolean getStriped() {
        return this.striped;
    }

    /** Sets whether the export should be striped. */
    public BakExportOptions setStriped(java.lang.Boolean striped) {
        this.striped = striped;
        return this;
    }

    /** Stores an arbitrary field by name, returning this object. */
    @Override
    public BakExportOptions set(String fieldName, Object value) {
        return (BakExportOptions) super.set(fieldName, value);
    }

    /** Creates a copy of this model object. */
    @Override
    public BakExportOptions clone() {
        return (BakExportOptions) super.clone();
    }
}
/**
 * Options for exporting data as CSV; MySQL and PostgreSQL instances only.
 *
 * <p>Generated JSON model: each field maps to its wire name via
 * {@code @Key}; unset fields are {@code null}.</p>
 */
public static final class CsvExportOptions extends com.google.api.client.json.GenericJson {

    /** Character placed before a data character that needs escaping. */
    @com.google.api.client.util.Key
    private java.lang.String escapeCharacter;

    /** Character separating columns within each row (line) of the file. */
    @com.google.api.client.util.Key
    private java.lang.String fieldsTerminatedBy;

    /**
     * Line separator; rows missing trailing fields have the remaining columns
     * filled with their default values.
     */
    @com.google.api.client.util.Key
    private java.lang.String linesTerminatedBy;

    /** Quoting character used when a data value is quoted. */
    @com.google.api.client.util.Key
    private java.lang.String quoteCharacter;

    /** SELECT query used to extract the data. */
    @com.google.api.client.util.Key
    private java.lang.String selectQuery;

    /** Returns the escape character, or {@code null} if unset. */
    public java.lang.String getEscapeCharacter() {
        return this.escapeCharacter;
    }

    /** Sets the character placed before a data character that needs escaping. */
    public CsvExportOptions setEscapeCharacter(java.lang.String escapeCharacter) {
        this.escapeCharacter = escapeCharacter;
        return this;
    }

    /** Returns the field separator, or {@code null} if unset. */
    public java.lang.String getFieldsTerminatedBy() {
        return this.fieldsTerminatedBy;
    }

    /** Sets the character that separates columns within each row of the file. */
    public CsvExportOptions setFieldsTerminatedBy(java.lang.String fieldsTerminatedBy) {
        this.fieldsTerminatedBy = fieldsTerminatedBy;
        return this;
    }

    /** Returns the line separator, or {@code null} if unset. */
    public java.lang.String getLinesTerminatedBy() {
        return this.linesTerminatedBy;
    }

    /**
     * Sets the line separator; rows missing trailing fields have the remaining
     * columns filled with their default values.
     */
    public CsvExportOptions setLinesTerminatedBy(java.lang.String linesTerminatedBy) {
        this.linesTerminatedBy = linesTerminatedBy;
        return this;
    }

    /** Returns the quoting character, or {@code null} if unset. */
    public java.lang.String getQuoteCharacter() {
        return this.quoteCharacter;
    }

    /** Sets the quoting character used when a data value is quoted. */
    public CsvExportOptions setQuoteCharacter(java.lang.String quoteCharacter) {
        this.quoteCharacter = quoteCharacter;
        return this;
    }

    /** Returns the SELECT query used to extract the data, or {@code null} if unset. */
    public java.lang.String getSelectQuery() {
        return this.selectQuery;
    }

    /** Sets the SELECT query used to extract the data. */
    public CsvExportOptions setSelectQuery(java.lang.String selectQuery) {
        this.selectQuery = selectQuery;
        return this;
    }

    /** Stores an arbitrary field by name, returning this object. */
    @Override
    public CsvExportOptions set(String fieldName, Object value) {
        return (CsvExportOptions) super.set(fieldName, value);
    }

    /** Creates a copy of this model object. */
    @Override
    public CsvExportOptions clone() {
        return (CsvExportOptions) super.clone();
    }
}
/**
 * Options for exporting data as SQL statements.
 *
 * <p>Generated JSON model: each field maps to its wire name via
 * {@code @Key}; unset fields are {@code null}.</p>
 */
public static final class SqlExportOptions extends com.google.api.client.json.GenericJson {

    /** Options for exporting from MySQL. */
    @com.google.api.client.util.Key
    private MysqlExportOptions mysqlExportOptions;

    /** Optional. Whether the export should run in parallel. */
    @com.google.api.client.util.Key
    private java.lang.Boolean parallel;

    /** Options for exporting from a Cloud SQL for PostgreSQL instance. */
    @com.google.api.client.util.Key
    private PostgresExportOptions postgresExportOptions;

    /** Export only schemas. */
    @com.google.api.client.util.Key
    private java.lang.Boolean schemaOnly;

    /**
     * Tables to export, or that were exported, from the specified database;
     * when tables are given, exactly one database must be specified.
     * PostgreSQL instances allow only one table.
     */
    @com.google.api.client.util.Key
    private java.util.List<java.lang.String> tables;

    /** Optional. Number of threads to use for a parallel export. */
    @com.google.api.client.util.Key
    private java.lang.Integer threads;

    /** Returns the MySQL export options, or {@code null} if unset. */
    public MysqlExportOptions getMysqlExportOptions() {
        return this.mysqlExportOptions;
    }

    /** Sets the MySQL export options; returns this object for chaining. */
    public SqlExportOptions setMysqlExportOptions(MysqlExportOptions mysqlExportOptions) {
        this.mysqlExportOptions = mysqlExportOptions;
        return this;
    }

    /** Returns whether the export is parallel, or {@code null} if unset. */
    public java.lang.Boolean getParallel() {
        return this.parallel;
    }

    /** Sets whether the export should run in parallel. */
    public SqlExportOptions setParallel(java.lang.Boolean parallel) {
        this.parallel = parallel;
        return this;
    }

    /** Returns the PostgreSQL export options, or {@code null} if unset. */
    public PostgresExportOptions getPostgresExportOptions() {
        return this.postgresExportOptions;
    }

    /** Sets the PostgreSQL export options; returns this object for chaining. */
    public SqlExportOptions setPostgresExportOptions(PostgresExportOptions postgresExportOptions) {
        this.postgresExportOptions = postgresExportOptions;
        return this;
    }

    /** Returns whether only schemas are exported, or {@code null} if unset. */
    public java.lang.Boolean getSchemaOnly() {
        return this.schemaOnly;
    }

    /** Sets whether to export schemas only. */
    public SqlExportOptions setSchemaOnly(java.lang.Boolean schemaOnly) {
        this.schemaOnly = schemaOnly;
        return this;
    }

    /** Returns the tables to export, or {@code null} if unset. */
    public java.util.List<java.lang.String> getTables() {
        return this.tables;
    }

    /**
     * Sets the tables to export; when given, exactly one database must be
     * specified, and PostgreSQL instances allow only one table.
     */
    public SqlExportOptions setTables(java.util.List<java.lang.String> tables) {
        this.tables = tables;
        return this;
    }

    /** Returns the parallel-export thread count, or {@code null} if unset. */
    public java.lang.Integer getThreads() {
        return this.threads;
    }

    /** Sets the number of threads to use for a parallel export. */
    public SqlExportOptions setThreads(java.lang.Integer threads) {
        this.threads = threads;
        return this;
    }

    /** Stores an arbitrary field by name, returning this object. */
    @Override
    public SqlExportOptions set(String fieldName, Object value) {
        return (SqlExportOptions) super.set(fieldName, value);
    }

    /** Creates a copy of this model object. */
    @Override
    public SqlExportOptions clone() {
        return (SqlExportOptions) super.clone();
    }

    /**
     * Options for exporting from MySQL.
     */
    public static final class MysqlExportOptions extends com.google.api.client.json.GenericJson {

        /**
         * Replication setup statements: {@code 1} includes a CHANGE MASTER TO
         * statement with the binary-log coordinates and sets
         * --set-gtid-purged to ON; {@code 2} writes the CHANGE MASTER TO
         * statement as a SQL comment with no effect; any value other than
         * {@code 1} sets --set-gtid-purged to OFF.
         */
        @com.google.api.client.util.Key
        private java.lang.Integer masterData;

        /** Returns the master-data replication option, or {@code null} if unset. */
        public java.lang.Integer getMasterData() {
            return this.masterData;
        }

        /** Sets the master-data replication option (see field documentation). */
        public MysqlExportOptions setMasterData(java.lang.Integer masterData) {
            this.masterData = masterData;
            return this;
        }

        /** Stores an arbitrary field by name, returning this object. */
        @Override
        public MysqlExportOptions set(String fieldName, Object value) {
            return (MysqlExportOptions) super.set(fieldName, value);
        }

        /** Creates a copy of this model object. */
        @Override
        public MysqlExportOptions clone() {
            return (MysqlExportOptions) super.clone();
        }
    }

    /**
     * Options for exporting from a Cloud SQL for PostgreSQL instance.
     */
    public static final class PostgresExportOptions extends com.google.api.client.json.GenericJson {

        /**
         * Optional. Include DROP statements that delete database objects
         * before the import operation runs.
         */
        @com.google.api.client.util.Key
        private java.lang.Boolean clean;

        /**
         * Optional. Include an IF EXISTS statement with each DROP statement
         * produced by clean.
         */
        @com.google.api.client.util.Key
        private java.lang.Boolean ifExists;

        /** Returns the clean (DROP statements) flag, or {@code null} if unset. */
        public java.lang.Boolean getClean() {
            return this.clean;
        }

        /** Sets whether DROP statements are emitted before the import runs. */
        public PostgresExportOptions setClean(java.lang.Boolean clean) {
            this.clean = clean;
            return this;
        }

        /** Returns the IF EXISTS flag, or {@code null} if unset. */
        public java.lang.Boolean getIfExists() {
            return this.ifExists;
        }

        /** Sets whether IF EXISTS accompanies each DROP statement produced by clean. */
        public PostgresExportOptions setIfExists(java.lang.Boolean ifExists) {
            this.ifExists = ifExists;
            return this;
        }

        /** Stores an arbitrary field by name, returning this object. */
        @Override
        public PostgresExportOptions set(String fieldName, Object value) {
            return (PostgresExportOptions) super.set(fieldName, value);
        }

        /** Creates a copy of this model object. */
        @Override
        public PostgresExportOptions clone() {
            return (PostgresExportOptions) super.clone();
        }
    }
}
/**
* Optional. Export parameters specific to SQL Server TDE certificates
*/
public static final class TdeExportOptions extends com.google.api.client.json.GenericJson {
/**
* Required. Path to the TDE certificate public key in the form gs://bucketName/fileName. The
* instance must have write access to the bucket. Applicable only for SQL Server instances.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String certificatePath;
/**
* Required. Certificate name. Applicable only for SQL Server instances.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String name;
/**
* Required. Password that encrypts the private key.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String privateKeyPassword;
/**
* Required. Path to the TDE certificate private key in the form gs://bucketName/fileName. The
* instance must have write access to the location. Applicable only for SQL Server instances.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String privateKeyPath;
/**
* Required. Path to the TDE certificate public key in the form gs://bucketName/fileName. The
* instance must have write access to the bucket. Applicable only for SQL Server instances.
* @return value or {@code null} for none
*/
public java.lang.String getCertificatePath() {
return certificatePath;
}
/**
* Required. Path to the TDE certificate public key in the form gs://bucketName/fileName. The
* instance must have write access to the bucket. Applicable only for SQL Server instances.
* @param certificatePath certificatePath or {@code null} for none
*/
public TdeExportOptions setCertificatePath(java.lang.String certificatePath) {
this.certificatePath = certificatePath;
return this;
}
/**
* Required. Certificate name. Applicable only for SQL Server instances.
* @return value or {@code null} for none
*/
public java.lang.String getName() {
return name;
}
/**
* Required. Certificate name. Applicable only for SQL Server instances.
* @param name name or {@code null} for none
*/
public TdeExportOptions setName(java.lang.String name) {
this.name = name;
return this;
}
/**
* Required. Password that encrypts the private key.
* @return value or {@code null} for none
*/
public java.lang.String getPrivateKeyPassword() {
return privateKeyPassword;
}
/**
* Required. Password that encrypts the private key.
* @param privateKeyPassword privateKeyPassword or {@code null} for none
*/
public TdeExportOptions setPrivateKeyPassword(java.lang.String privateKeyPassword) {
this.privateKeyPassword = privateKeyPassword;
return this;
}
/**
* Required. Path to the TDE certificate private key in the form gs://bucketName/fileName. The
* instance must have write access to the location. Applicable only for SQL Server instances.
* @return value or {@code null} for none
*/
public java.lang.String getPrivateKeyPath() {
return privateKeyPath;
}
/**
* Required. Path to the TDE certificate private key in the form gs://bucketName/fileName. The
* instance must have write access to the location. Applicable only for SQL Server instances.
* @param privateKeyPath privateKeyPath or {@code null} for none
*/
public TdeExportOptions setPrivateKeyPath(java.lang.String privateKeyPath) {
this.privateKeyPath = privateKeyPath;
return this;
}
@Override
public TdeExportOptions set(String fieldName, Object value) {
return (TdeExportOptions) super.set(fieldName, value);
}
@Override
public TdeExportOptions clone() {
return (TdeExportOptions) super.clone();
}
}
}
|
googleapis/google-cloud-java | 35,798 | java-video-intelligence/proto-google-cloud-video-intelligence-v1beta2/src/main/java/com/google/cloud/videointelligence/v1beta2/AnnotateVideoResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1beta2/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1beta2;
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1beta2.AnnotateVideoResponse}
*/
public final class AnnotateVideoResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1beta2.AnnotateVideoResponse)
AnnotateVideoResponseOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use AnnotateVideoResponse.newBuilder() to construct.
  private AnnotateVideoResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor produces the default message: an empty results list.
  private AnnotateVideoResponse() {
    annotationResults_ = java.util.Collections.emptyList();
  }

  // Framework hook: allocates a fresh empty instance (parameter is unused by design).
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AnnotateVideoResponse();
  }

  // Proto descriptor for this message type, from the generated file-level descriptor holder.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoResponse_descriptor;
  }

  // Maps descriptor fields to this class and its Builder for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse.class,
            com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse.Builder.class);
  }
  public static final int ANNOTATION_RESULTS_FIELD_NUMBER = 1;

  // Backing list for repeated field 1. Never null: the no-arg constructor installs an empty
  // list, and building from a Builder installs the Builder's (frozen) list, so the accessors
  // below can return it directly.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults>
      annotationResults_;

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults>
      getAnnotationResultsList() {
    return annotationResults_;
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public java.util.List<
          ? extends com.google.cloud.videointelligence.v1beta2.VideoAnnotationResultsOrBuilder>
      getAnnotationResultsOrBuilderList() {
    return annotationResults_;
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public int getAnnotationResultsCount() {
    return annotationResults_.size();
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults getAnnotationResults(
      int index) {
    return annotationResults_.get(index);
  }

  /**
   *
   *
   * <pre>
   * Annotation results for all videos specified in `AnnotateVideoRequest`.
   * </pre>
   *
   * <code>
   * repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1beta2.VideoAnnotationResultsOrBuilder
      getAnnotationResultsOrBuilder(int index) {
    return annotationResults_.get(index);
  }
  // Tri-state cache for isInitialized(): -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  // This message has no required fields, so it is always initialized once checked.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes each annotation result under field number 1, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < annotationResults_.size(); i++) {
      output.writeMessage(1, annotationResults_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the serialized byte size once and caches it in memoizedSize (-1 = unset).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < annotationResults_.size(); i++) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(1, annotationResults_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality: same annotation results list and same unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse other =
        (com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse) obj;
    if (!getAnnotationResultsList().equals(other.getAnnotationResultsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash seeded from the descriptor, folding in field 1 only when non-empty.
  // 0 doubles as the "not yet computed" sentinel for memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getAnnotationResultsCount() > 0) {
      hash = (37 * hash) + ANNOTATION_RESULTS_FIELD_NUMBER;
      hash = (53 * hash) + getAnnotationResultsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads. Byte-based inputs delegate to PARSER directly;
  // stream-based inputs go through the GeneratedMessageV3 IO helpers, which translate
  // protobuf parse failures into the declared exception types.
  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a length-prefixed message from the stream.
  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Fresh builder with default (empty) state.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated from an existing message.
  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // Skips the merge when this is already the default instance.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Video annotation response. Included in the `response`
* field of the `Operation` returned by the `GetOperation`
* call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1beta2.AnnotateVideoResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1beta2.AnnotateVideoResponse)
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse.class,
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse.Builder.class);
}
// Construct using com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (annotationResultsBuilder_ == null) {
annotationResults_ = java.util.Collections.emptyList();
} else {
annotationResults_ = null;
annotationResultsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1beta2_AnnotateVideoResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse
getDefaultInstanceForType() {
return com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse build() {
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse buildPartial() {
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse result =
new com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse result) {
if (annotationResultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
annotationResults_ = java.util.Collections.unmodifiableList(annotationResults_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.annotationResults_ = annotationResults_;
} else {
result.annotationResults_ = annotationResultsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse result) {
int from_bitField0_ = bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse) {
return mergeFrom((com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse other) {
if (other
== com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse.getDefaultInstance())
return this;
if (annotationResultsBuilder_ == null) {
if (!other.annotationResults_.isEmpty()) {
if (annotationResults_.isEmpty()) {
annotationResults_ = other.annotationResults_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAnnotationResultsIsMutable();
annotationResults_.addAll(other.annotationResults_);
}
onChanged();
}
} else {
if (!other.annotationResults_.isEmpty()) {
if (annotationResultsBuilder_.isEmpty()) {
annotationResultsBuilder_.dispose();
annotationResultsBuilder_ = null;
annotationResults_ = other.annotationResults_;
bitField0_ = (bitField0_ & ~0x00000001);
annotationResultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAnnotationResultsFieldBuilder()
: null;
} else {
annotationResultsBuilder_.addAllMessages(other.annotationResults_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults m =
input.readMessage(
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.parser(),
extensionRegistry);
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.add(m);
} else {
annotationResultsBuilder_.addMessage(m);
}
break;
} // case 10
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults>
annotationResults_ = java.util.Collections.emptyList();
private void ensureAnnotationResultsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
annotationResults_ =
new java.util.ArrayList<
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults>(
annotationResults_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResultsOrBuilder>
annotationResultsBuilder_;
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults>
getAnnotationResultsList() {
if (annotationResultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(annotationResults_);
} else {
return annotationResultsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public int getAnnotationResultsCount() {
if (annotationResultsBuilder_ == null) {
return annotationResults_.size();
} else {
return annotationResultsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults getAnnotationResults(
int index) {
if (annotationResultsBuilder_ == null) {
return annotationResults_.get(index);
} else {
return annotationResultsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder setAnnotationResults(
int index, com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.set(index, value);
onChanged();
} else {
annotationResultsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder setAnnotationResults(
int index,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.set(index, builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.add(value);
onChanged();
} else {
annotationResultsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
int index, com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults value) {
if (annotationResultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAnnotationResultsIsMutable();
annotationResults_.add(index, value);
onChanged();
} else {
annotationResultsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.add(builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAnnotationResults(
int index,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder builderForValue) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.add(index, builderForValue.build());
onChanged();
} else {
annotationResultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder addAllAnnotationResults(
java.lang.Iterable<
? extends com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults>
values) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, annotationResults_);
onChanged();
} else {
annotationResultsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder clearAnnotationResults() {
if (annotationResultsBuilder_ == null) {
annotationResults_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
annotationResultsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public Builder removeAnnotationResults(int index) {
if (annotationResultsBuilder_ == null) {
ensureAnnotationResultsIsMutable();
annotationResults_.remove(index);
onChanged();
} else {
annotationResultsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder
getAnnotationResultsBuilder(int index) {
return getAnnotationResultsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1beta2.VideoAnnotationResultsOrBuilder
getAnnotationResultsOrBuilder(int index) {
if (annotationResultsBuilder_ == null) {
return annotationResults_.get(index);
} else {
return annotationResultsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<
? extends com.google.cloud.videointelligence.v1beta2.VideoAnnotationResultsOrBuilder>
getAnnotationResultsOrBuilderList() {
if (annotationResultsBuilder_ != null) {
return annotationResultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(annotationResults_);
}
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder
addAnnotationResultsBuilder() {
return getAnnotationResultsFieldBuilder()
.addBuilder(
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults
.getDefaultInstance());
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder
addAnnotationResultsBuilder(int index) {
return getAnnotationResultsFieldBuilder()
.addBuilder(
index,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults
.getDefaultInstance());
}
/**
*
*
* <pre>
* Annotation results for all videos specified in `AnnotateVideoRequest`.
* </pre>
*
* <code>
* repeated .google.cloud.videointelligence.v1beta2.VideoAnnotationResults annotation_results = 1;
* </code>
*/
public java.util.List<com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder>
getAnnotationResultsBuilderList() {
return getAnnotationResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResultsOrBuilder>
getAnnotationResultsFieldBuilder() {
if (annotationResultsBuilder_ == null) {
annotationResultsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResults.Builder,
com.google.cloud.videointelligence.v1beta2.VideoAnnotationResultsOrBuilder>(
annotationResults_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
annotationResults_ = null;
}
return annotationResultsBuilder_;
}
    /** Replaces this message's unknown-field set; delegates to the generated superclass. */
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    /** Merges the given unknown fields into this message's set; delegates to the superclass. */
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1beta2.AnnotateVideoResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta2.AnnotateVideoResponse)
  // Shared immutable singleton; created eagerly when the class is loaded.
  private static final com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse();
  }

  /** Returns the shared default (empty) instance of {@code AnnotateVideoResponse}. */
  public static com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser. Every failure mode is normalized to InvalidProtocolBufferException,
  // with the partially parsed message attached so callers can inspect what was read.
  private static final com.google.protobuf.Parser<AnnotateVideoResponse> PARSER =
      new com.google.protobuf.AbstractParser<AnnotateVideoResponse>() {
        @java.lang.Override
        public AnnotateVideoResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Re-throw with whatever was parsed before the failure.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            // Missing required fields: convert to the protobuf exception callers expect.
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Plain I/O errors are wrapped, again carrying the partial message.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the static wire-format parser for {@code AnnotateVideoResponse}. */
  public static com.google.protobuf.Parser<AnnotateVideoResponse> parser() {
    return PARSER;
  }
  /** Instance accessor for the same shared parser, required by the Message interface. */
  @java.lang.Override
  public com.google.protobuf.Parser<AnnotateVideoResponse> getParserForType() {
    return PARSER;
  }
  /** Instance accessor for the shared default instance, required by the Message interface. */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1beta2.AnnotateVideoResponse
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,012 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/GlobalForwardingRulesStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.GlobalForwardingRulesClient.ListPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.httpjson.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.DeleteGlobalForwardingRuleRequest;
import com.google.cloud.compute.v1.ForwardingRule;
import com.google.cloud.compute.v1.ForwardingRuleList;
import com.google.cloud.compute.v1.GetGlobalForwardingRuleRequest;
import com.google.cloud.compute.v1.InsertGlobalForwardingRuleRequest;
import com.google.cloud.compute.v1.ListGlobalForwardingRulesRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.PatchGlobalForwardingRuleRequest;
import com.google.cloud.compute.v1.SetLabelsGlobalForwardingRuleRequest;
import com.google.cloud.compute.v1.SetTargetGlobalForwardingRuleRequest;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link GlobalForwardingRulesStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (compute.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of get:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* GlobalForwardingRulesStubSettings.Builder globalForwardingRulesSettingsBuilder =
* GlobalForwardingRulesStubSettings.newBuilder();
* globalForwardingRulesSettingsBuilder
* .getSettings()
* .setRetrySettings(
* globalForwardingRulesSettingsBuilder
* .getSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* GlobalForwardingRulesStubSettings globalForwardingRulesSettings =
* globalForwardingRulesSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for delete:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* GlobalForwardingRulesStubSettings.Builder globalForwardingRulesSettingsBuilder =
* GlobalForwardingRulesStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
 * OperationTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* globalForwardingRulesSettingsBuilder
 * .deleteOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class GlobalForwardingRulesStubSettings
extends StubSettings<GlobalForwardingRulesStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder()
.add("https://www.googleapis.com/auth/compute")
.add("https://www.googleapis.com/auth/cloud-platform")
.build();
private final UnaryCallSettings<DeleteGlobalForwardingRuleRequest, Operation> deleteSettings;
private final OperationCallSettings<DeleteGlobalForwardingRuleRequest, Operation, Operation>
deleteOperationSettings;
private final UnaryCallSettings<GetGlobalForwardingRuleRequest, ForwardingRule> getSettings;
private final UnaryCallSettings<InsertGlobalForwardingRuleRequest, Operation> insertSettings;
private final OperationCallSettings<InsertGlobalForwardingRuleRequest, Operation, Operation>
insertOperationSettings;
private final PagedCallSettings<
ListGlobalForwardingRulesRequest, ForwardingRuleList, ListPagedResponse>
listSettings;
private final UnaryCallSettings<PatchGlobalForwardingRuleRequest, Operation> patchSettings;
private final OperationCallSettings<PatchGlobalForwardingRuleRequest, Operation, Operation>
patchOperationSettings;
private final UnaryCallSettings<SetLabelsGlobalForwardingRuleRequest, Operation>
setLabelsSettings;
private final OperationCallSettings<SetLabelsGlobalForwardingRuleRequest, Operation, Operation>
setLabelsOperationSettings;
private final UnaryCallSettings<SetTargetGlobalForwardingRuleRequest, Operation>
setTargetSettings;
private final OperationCallSettings<SetTargetGlobalForwardingRuleRequest, Operation, Operation>
setTargetOperationSettings;
private static final PagedListDescriptor<
ListGlobalForwardingRulesRequest, ForwardingRuleList, ForwardingRule>
LIST_PAGE_STR_DESC =
new PagedListDescriptor<
ListGlobalForwardingRulesRequest, ForwardingRuleList, ForwardingRule>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListGlobalForwardingRulesRequest injectToken(
ListGlobalForwardingRulesRequest payload, String token) {
return ListGlobalForwardingRulesRequest.newBuilder(payload)
.setPageToken(token)
.build();
}
@Override
public ListGlobalForwardingRulesRequest injectPageSize(
ListGlobalForwardingRulesRequest payload, int pageSize) {
return ListGlobalForwardingRulesRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(ListGlobalForwardingRulesRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(ForwardingRuleList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<ForwardingRule> extractResources(ForwardingRuleList payload) {
return payload.getItemsList();
}
};
private static final PagedListResponseFactory<
ListGlobalForwardingRulesRequest, ForwardingRuleList, ListPagedResponse>
LIST_PAGE_STR_FACT =
new PagedListResponseFactory<
ListGlobalForwardingRulesRequest, ForwardingRuleList, ListPagedResponse>() {
@Override
public ApiFuture<ListPagedResponse> getFuturePagedResponse(
UnaryCallable<ListGlobalForwardingRulesRequest, ForwardingRuleList> callable,
ListGlobalForwardingRulesRequest request,
ApiCallContext context,
ApiFuture<ForwardingRuleList> futureResponse) {
PageContext<ListGlobalForwardingRulesRequest, ForwardingRuleList, ForwardingRule>
pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context);
return ListPagedResponse.createAsync(pageContext, futureResponse);
}
};
/** Returns the object with the settings used for calls to delete. */
public UnaryCallSettings<DeleteGlobalForwardingRuleRequest, Operation> deleteSettings() {
return deleteSettings;
}
/** Returns the object with the settings used for calls to delete. */
public OperationCallSettings<DeleteGlobalForwardingRuleRequest, Operation, Operation>
deleteOperationSettings() {
return deleteOperationSettings;
}
/** Returns the object with the settings used for calls to get. */
public UnaryCallSettings<GetGlobalForwardingRuleRequest, ForwardingRule> getSettings() {
return getSettings;
}
/** Returns the object with the settings used for calls to insert. */
public UnaryCallSettings<InsertGlobalForwardingRuleRequest, Operation> insertSettings() {
return insertSettings;
}
/** Returns the object with the settings used for calls to insert. */
public OperationCallSettings<InsertGlobalForwardingRuleRequest, Operation, Operation>
insertOperationSettings() {
return insertOperationSettings;
}
/** Returns the object with the settings used for calls to list. */
public PagedCallSettings<ListGlobalForwardingRulesRequest, ForwardingRuleList, ListPagedResponse>
listSettings() {
return listSettings;
}
/** Returns the object with the settings used for calls to patch. */
public UnaryCallSettings<PatchGlobalForwardingRuleRequest, Operation> patchSettings() {
return patchSettings;
}
/** Returns the object with the settings used for calls to patch. */
public OperationCallSettings<PatchGlobalForwardingRuleRequest, Operation, Operation>
patchOperationSettings() {
return patchOperationSettings;
}
/** Returns the object with the settings used for calls to setLabels. */
public UnaryCallSettings<SetLabelsGlobalForwardingRuleRequest, Operation> setLabelsSettings() {
return setLabelsSettings;
}
/** Returns the object with the settings used for calls to setLabels. */
public OperationCallSettings<SetLabelsGlobalForwardingRuleRequest, Operation, Operation>
setLabelsOperationSettings() {
return setLabelsOperationSettings;
}
/** Returns the object with the settings used for calls to setTarget. */
public UnaryCallSettings<SetTargetGlobalForwardingRuleRequest, Operation> setTargetSettings() {
return setTargetSettings;
}
/** Returns the object with the settings used for calls to setTarget. */
public OperationCallSettings<SetTargetGlobalForwardingRuleRequest, Operation, Operation>
setTargetOperationSettings() {
return setTargetOperationSettings;
}
public GlobalForwardingRulesStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonGlobalForwardingRulesStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
/** Returns the default service name. */
@Override
public String getServiceName() {
return "compute";
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return InstantiatingExecutorProvider.newBuilder();
}
/** Returns the default service endpoint. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "compute.googleapis.com:443";
}
/** Returns the default mTLS service endpoint. */
public static String getDefaultMtlsEndpoint() {
return "compute.mtls.googleapis.com:443";
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return DEFAULT_SERVICE_SCOPES;
}
/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return GoogleCredentialsProvider.newBuilder()
.setScopesToApply(DEFAULT_SERVICE_SCOPES)
.setUseJwtAccessWithScope(true);
}
/** Returns a builder for the default ChannelProvider for this service. */
public static InstantiatingHttpJsonChannelProvider.Builder
defaultHttpJsonTransportProviderBuilder() {
return InstantiatingHttpJsonChannelProvider.newBuilder();
}
public static TransportChannelProvider defaultTransportChannelProvider() {
return defaultHttpJsonTransportProviderBuilder().build();
}
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(GlobalForwardingRulesStubSettings.class))
.setTransportToken(
GaxHttpJsonProperties.getHttpJsonTokenName(),
GaxHttpJsonProperties.getHttpJsonVersion());
}
/** Returns a new builder for this class. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
protected GlobalForwardingRulesStubSettings(Builder settingsBuilder) throws IOException {
super(settingsBuilder);
deleteSettings = settingsBuilder.deleteSettings().build();
deleteOperationSettings = settingsBuilder.deleteOperationSettings().build();
getSettings = settingsBuilder.getSettings().build();
insertSettings = settingsBuilder.insertSettings().build();
insertOperationSettings = settingsBuilder.insertOperationSettings().build();
listSettings = settingsBuilder.listSettings().build();
patchSettings = settingsBuilder.patchSettings().build();
patchOperationSettings = settingsBuilder.patchOperationSettings().build();
setLabelsSettings = settingsBuilder.setLabelsSettings().build();
setLabelsOperationSettings = settingsBuilder.setLabelsOperationSettings().build();
setTargetSettings = settingsBuilder.setTargetSettings().build();
setTargetOperationSettings = settingsBuilder.setTargetOperationSettings().build();
}
/** Builder for GlobalForwardingRulesStubSettings. */
public static class Builder
extends StubSettings.Builder<GlobalForwardingRulesStubSettings, Builder> {
private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
private final UnaryCallSettings.Builder<DeleteGlobalForwardingRuleRequest, Operation>
deleteSettings;
private final OperationCallSettings.Builder<
DeleteGlobalForwardingRuleRequest, Operation, Operation>
deleteOperationSettings;
private final UnaryCallSettings.Builder<GetGlobalForwardingRuleRequest, ForwardingRule>
getSettings;
private final UnaryCallSettings.Builder<InsertGlobalForwardingRuleRequest, Operation>
insertSettings;
private final OperationCallSettings.Builder<
InsertGlobalForwardingRuleRequest, Operation, Operation>
insertOperationSettings;
private final PagedCallSettings.Builder<
ListGlobalForwardingRulesRequest, ForwardingRuleList, ListPagedResponse>
listSettings;
private final UnaryCallSettings.Builder<PatchGlobalForwardingRuleRequest, Operation>
patchSettings;
private final OperationCallSettings.Builder<
PatchGlobalForwardingRuleRequest, Operation, Operation>
patchOperationSettings;
private final UnaryCallSettings.Builder<SetLabelsGlobalForwardingRuleRequest, Operation>
setLabelsSettings;
private final OperationCallSettings.Builder<
SetLabelsGlobalForwardingRuleRequest, Operation, Operation>
setLabelsOperationSettings;
private final UnaryCallSettings.Builder<SetTargetGlobalForwardingRuleRequest, Operation>
setTargetSettings;
private final OperationCallSettings.Builder<
SetTargetGlobalForwardingRuleRequest, Operation, Operation>
setTargetOperationSettings;
private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
RETRYABLE_CODE_DEFINITIONS;
static {
ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
ImmutableMap.builder();
definitions.put(
"no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
definitions.put(
"retry_policy_0_codes",
ImmutableSet.copyOf(
Lists.<StatusCode.Code>newArrayList(
StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
RETRYABLE_CODE_DEFINITIONS = definitions.build();
}
private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
static {
ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
RetrySettings settings = null;
settings =
RetrySettings.newBuilder()
.setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build();
definitions.put("no_retry_1_params", settings);
settings =
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(100L))
.setRetryDelayMultiplier(1.3)
.setMaxRetryDelayDuration(Duration.ofMillis(60000L))
.setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build();
definitions.put("retry_policy_0_params", settings);
RETRY_PARAM_DEFINITIONS = definitions.build();
}
protected Builder() {
this(((ClientContext) null));
}
protected Builder(ClientContext clientContext) {
super(clientContext);
deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
deleteOperationSettings = OperationCallSettings.newBuilder();
getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
insertOperationSettings = OperationCallSettings.newBuilder();
listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT);
patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
patchOperationSettings = OperationCallSettings.newBuilder();
setLabelsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
setLabelsOperationSettings = OperationCallSettings.newBuilder();
setTargetSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
setTargetOperationSettings = OperationCallSettings.newBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
deleteSettings,
getSettings,
insertSettings,
listSettings,
patchSettings,
setLabelsSettings,
setTargetSettings);
initDefaults(this);
}
protected Builder(GlobalForwardingRulesStubSettings settings) {
super(settings);
deleteSettings = settings.deleteSettings.toBuilder();
deleteOperationSettings = settings.deleteOperationSettings.toBuilder();
getSettings = settings.getSettings.toBuilder();
insertSettings = settings.insertSettings.toBuilder();
insertOperationSettings = settings.insertOperationSettings.toBuilder();
listSettings = settings.listSettings.toBuilder();
patchSettings = settings.patchSettings.toBuilder();
patchOperationSettings = settings.patchOperationSettings.toBuilder();
setLabelsSettings = settings.setLabelsSettings.toBuilder();
setLabelsOperationSettings = settings.setLabelsOperationSettings.toBuilder();
setTargetSettings = settings.setTargetSettings.toBuilder();
setTargetOperationSettings = settings.setTargetOperationSettings.toBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
deleteSettings,
getSettings,
insertSettings,
listSettings,
patchSettings,
setLabelsSettings,
setTargetSettings);
}
private static Builder createDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultTransportChannelProvider());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
private static Builder initDefaults(Builder builder) {
builder
.deleteSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.getSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.insertSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.listSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.patchSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.setLabelsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.setTargetSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
builder
.deleteOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<DeleteGlobalForwardingRuleRequest, OperationSnapshot>
newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.insertOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<InsertGlobalForwardingRuleRequest, OperationSnapshot>
newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.patchOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<PatchGlobalForwardingRuleRequest, OperationSnapshot>
newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.setLabelsOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<SetLabelsGlobalForwardingRuleRequest, OperationSnapshot>
newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
builder
.setTargetOperationSettings()
.setInitialCallSettings(
UnaryCallSettings
.<SetTargetGlobalForwardingRuleRequest, OperationSnapshot>
newUnaryCallSettingsBuilder()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
.build())
.setResponseTransformer(
ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
.setMetadataTransformer(
ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
.setPollingAlgorithm(
OperationTimedPollAlgorithm.create(
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(500L))
.setRetryDelayMultiplier(1.5)
.setMaxRetryDelayDuration(Duration.ofMillis(20000L))
.setInitialRpcTimeoutDuration(Duration.ZERO)
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ZERO)
.setTotalTimeoutDuration(Duration.ofMillis(600000L))
.build()));
return builder;
}
/**
* Applies the given settings updater function to all of the unary API methods in this service.
*
* <p>Note: This method does not support applying settings to streaming methods.
*/
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
return this;
}
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
return unaryMethodSettingsBuilders;
}
/** Returns the builder for the settings used for calls to delete. */
public UnaryCallSettings.Builder<DeleteGlobalForwardingRuleRequest, Operation>
deleteSettings() {
return deleteSettings;
}
/** Returns the builder for the settings used for calls to delete. */
public OperationCallSettings.Builder<DeleteGlobalForwardingRuleRequest, Operation, Operation>
deleteOperationSettings() {
return deleteOperationSettings;
}
/** Returns the builder for the settings used for calls to get. */
public UnaryCallSettings.Builder<GetGlobalForwardingRuleRequest, ForwardingRule> getSettings() {
return getSettings;
}
/** Returns the builder for the settings used for calls to insert. */
public UnaryCallSettings.Builder<InsertGlobalForwardingRuleRequest, Operation>
insertSettings() {
return insertSettings;
}
    /** Returns the builder for the settings used for calls to insert as a long-running operation. */
    public OperationCallSettings.Builder<InsertGlobalForwardingRuleRequest, Operation, Operation>
        insertOperationSettings() {
      return insertOperationSettings;
    }
    /** Returns the builder for the settings used for paged calls to list. */
    public PagedCallSettings.Builder<
            ListGlobalForwardingRulesRequest, ForwardingRuleList, ListPagedResponse>
        listSettings() {
      return listSettings;
    }
    /** Returns the builder for the settings used for unary calls to patch. */
    public UnaryCallSettings.Builder<PatchGlobalForwardingRuleRequest, Operation> patchSettings() {
      return patchSettings;
    }
    /** Returns the builder for the settings used for calls to patch as a long-running operation. */
    public OperationCallSettings.Builder<PatchGlobalForwardingRuleRequest, Operation, Operation>
        patchOperationSettings() {
      return patchOperationSettings;
    }
    /** Returns the builder for the settings used for unary calls to setLabels. */
    public UnaryCallSettings.Builder<SetLabelsGlobalForwardingRuleRequest, Operation>
        setLabelsSettings() {
      return setLabelsSettings;
    }
    /** Returns the builder for the settings used for calls to setLabels as a long-running operation. */
    public OperationCallSettings.Builder<SetLabelsGlobalForwardingRuleRequest, Operation, Operation>
        setLabelsOperationSettings() {
      return setLabelsOperationSettings;
    }
    /** Returns the builder for the settings used for unary calls to setTarget. */
    public UnaryCallSettings.Builder<SetTargetGlobalForwardingRuleRequest, Operation>
        setTargetSettings() {
      return setTargetSettings;
    }
    /** Returns the builder for the settings used for calls to setTarget as a long-running operation. */
    public OperationCallSettings.Builder<SetTargetGlobalForwardingRuleRequest, Operation, Operation>
        setTargetOperationSettings() {
      return setTargetOperationSettings;
    }
    /**
     * Builds an immutable {@code GlobalForwardingRulesStubSettings} from the current state of this
     * builder.
     *
     * @throws IOException if the settings cannot be built
     */
    @Override
    public GlobalForwardingRulesStubSettings build() throws IOException {
      return new GlobalForwardingRulesStubSettings(this);
    }
}
}
|
apache/incubator-seata | 35,933 | server/src/test/java/org/apache/seata/server/session/FileSessionManagerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seata.server.session;
import org.apache.commons.lang.time.DateUtils;
import org.apache.seata.common.XID;
import org.apache.seata.common.loader.EnhancedServiceLoader;
import org.apache.seata.common.result.PageResult;
import org.apache.seata.common.store.SessionMode;
import org.apache.seata.common.util.CollectionUtils;
import org.apache.seata.common.util.UUIDGenerator;
import org.apache.seata.core.model.BranchStatus;
import org.apache.seata.core.model.BranchType;
import org.apache.seata.core.model.GlobalStatus;
import org.apache.seata.core.model.LockStatus;
import org.apache.seata.server.BaseSpringBootTest;
import org.apache.seata.server.console.entity.param.GlobalSessionParam;
import org.apache.seata.server.console.entity.vo.GlobalSessionVO;
import org.apache.seata.server.console.service.BranchSessionService;
import org.apache.seata.server.console.service.GlobalSessionService;
import org.apache.seata.server.storage.file.session.FileSessionManager;
import org.apache.seata.server.util.StoreUtil;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;
import org.springframework.context.ApplicationContext;
import javax.annotation.Resource;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.stream.Stream;
import static org.apache.seata.common.DefaultValues.DEFAULT_TX_GROUP;
/**
* The type File based session manager test.
*
* @since 2019 /1/22
*/
public class FileSessionManagerTest extends BaseSpringBootTest {

    private static volatile List<SessionManager> sessionManagerList;

    @Resource(type = GlobalSessionService.class)
    private GlobalSessionService globalSessionService;

    @Resource(type = BranchSessionService.class)
    private BranchSessionService branchSessionService;

    /**
     * Creates the file-based session managers under test.
     *
     * <p>Fails fast if creation throws: previously the {@link IOException} was only printed,
     * leaving {@code sessionManagerList} null so every test failed later with an unrelated NPE.
     */
    @BeforeAll
    public static void setUp(ApplicationContext context) {
        StoreUtil.deleteDataFile();
        try {
            EnhancedServiceLoader.unloadAll();
            sessionManagerList =
                Arrays.asList(new FileSessionManager("root.data", "."), new FileSessionManager("test", null));
        } catch (IOException e) {
            // Fail fast instead of swallowing: a null sessionManagerList breaks every test.
            throw new IllegalStateException("Failed to create FileSessionManager instances", e);
        }
    }

    @BeforeEach
    public void setUp() {
        SessionHolder.init(SessionMode.FILE);
    }

    @AfterEach
    public void tearDown() {
        SessionHolder.destroy();
    }

    /**
     * Add global session test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void addGlobalSessionTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.addGlobalSession(globalSession);
            sessionManager.removeGlobalSession(globalSession);
        }
    }

    /**
     * Find global session test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void findGlobalSessionTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.addGlobalSession(globalSession);
            GlobalSession expected = sessionManager.findGlobalSession(globalSession.getXid());
            Assertions.assertNotNull(expected);
            // Each field of the session read back must match what was stored.
            Assertions.assertEquals(expected.getTransactionId(), globalSession.getTransactionId());
            Assertions.assertEquals(expected.getApplicationId(), globalSession.getApplicationId());
            Assertions.assertEquals(expected.getTransactionServiceGroup(), globalSession.getTransactionServiceGroup());
            Assertions.assertEquals(expected.getTransactionName(), globalSession.getTransactionName());
            Assertions.assertEquals(expected.getStatus(), globalSession.getStatus());
            sessionManager.removeGlobalSession(globalSession);
        }
    }

    /**
     * Update global session status test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void updateGlobalSessionStatusTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.addGlobalSession(globalSession);
            globalSession.setStatus(GlobalStatus.Finished);
            sessionManager.updateGlobalSessionStatus(globalSession, GlobalStatus.Finished);
            GlobalSession expected = sessionManager.findGlobalSession(globalSession.getXid());
            Assertions.assertNotNull(expected);
            Assertions.assertEquals(GlobalStatus.Finished, expected.getStatus());
            sessionManager.removeGlobalSession(globalSession);
        }
    }

    /**
     * Remove global session test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void removeGlobalSessionTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.addGlobalSession(globalSession);
            sessionManager.removeGlobalSession(globalSession);
            GlobalSession expected = sessionManager.findGlobalSession(globalSession.getXid());
            Assertions.assertNull(expected);
        }
    }

    /**
     * Add branch session test.
     *
     * @param globalSession the global session
     * @param branchSession the branch session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionProvider")
    public void addBranchSessionTest(GlobalSession globalSession, BranchSession branchSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.addGlobalSession(globalSession);
            sessionManager.addBranchSession(globalSession, branchSession);
            sessionManager.removeBranchSession(globalSession, branchSession);
            sessionManager.removeGlobalSession(globalSession);
        }
    }

    /**
     * Update branch session status test.
     *
     * @param globalSession the global session
     * @param branchSession the branch session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionProvider")
    public void updateBranchSessionStatusTest(GlobalSession globalSession, BranchSession branchSession)
        throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.addGlobalSession(globalSession);
            sessionManager.addBranchSession(globalSession, branchSession);
            sessionManager.updateBranchSessionStatus(branchSession, BranchStatus.PhaseTwo_Committed);
            sessionManager.removeBranchSession(globalSession, branchSession);
            sessionManager.removeGlobalSession(globalSession);
        }
    }

    /**
     * Remove branch session test.
     *
     * @param globalSession the global session
     * @param branchSession the branch session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionProvider")
    public void removeBranchSessionTest(GlobalSession globalSession, BranchSession branchSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.addGlobalSession(globalSession);
            sessionManager.addBranchSession(globalSession, branchSession);
            sessionManager.removeBranchSession(globalSession, branchSession);
            sessionManager.removeGlobalSession(globalSession);
        }
    }

    /**
     * All sessions test.
     *
     * @param globalSessions the global sessions
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionsProvider")
    public void allSessionsTest(List<GlobalSession> globalSessions) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            for (GlobalSession globalSession : globalSessions) {
                sessionManager.addGlobalSession(globalSession);
            }
            Collection<GlobalSession> expectedGlobalSessions = sessionManager.allSessions();
            Assertions.assertNotNull(expectedGlobalSessions);
            Assertions.assertEquals(2, expectedGlobalSessions.size());
            for (GlobalSession globalSession : globalSessions) {
                sessionManager.removeGlobalSession(globalSession);
            }
        }
    }

    /**
     * Find global sessions test.
     *
     * @param globalSessions the global sessions
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionsProvider")
    public void findGlobalSessionsTest(List<GlobalSession> globalSessions) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            for (GlobalSession globalSession : globalSessions) {
                sessionManager.addGlobalSession(globalSession);
            }
            // A wide time-window condition should match both stored sessions.
            SessionCondition sessionCondition = new SessionCondition(30 * 24 * 3600);
            Collection<GlobalSession> expectedGlobalSessions = sessionManager.findGlobalSessions(sessionCondition);
            Assertions.assertNotNull(expectedGlobalSessions);
            Assertions.assertEquals(2, expectedGlobalSessions.size());
            // Narrowing by xid, then transactionId, then status should keep matching exactly one.
            SessionCondition sessionCondition1 =
                new SessionCondition(globalSessions.get(0).getXid());
            expectedGlobalSessions = sessionManager.findGlobalSessions(sessionCondition1);
            Assertions.assertNotNull(expectedGlobalSessions);
            Assertions.assertEquals(1, expectedGlobalSessions.size());
            sessionCondition1.setTransactionId(globalSessions.get(0).getTransactionId());
            expectedGlobalSessions = sessionManager.findGlobalSessions(sessionCondition1);
            Assertions.assertNotNull(expectedGlobalSessions);
            Assertions.assertEquals(1, expectedGlobalSessions.size());
            sessionCondition1.setStatuses(globalSessions.get(0).getStatus());
            expectedGlobalSessions = sessionManager.findGlobalSessions(sessionCondition1);
            Assertions.assertNotNull(expectedGlobalSessions);
            Assertions.assertEquals(1, expectedGlobalSessions.size());
            for (GlobalSession globalSession : globalSessions) {
                sessionManager.removeGlobalSession(globalSession);
            }
        }
    }

    /**
     * Find global sessions with PageResult test.
     *
     * @param globalSessions the global sessions
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionsWithPageResultProvider")
    public void findGlobalSessionsWithPageResultTest(List<GlobalSession> globalSessions) throws Exception {
        try {
            final SessionManager sessionManager = SessionHolder.getRootSessionManager();
            // make sure sessionManager is empty
            Collection<GlobalSession> sessions = sessionManager.allSessions();
            if (CollectionUtils.isNotEmpty(sessions)) {
                // FileSessionManager use ConcurrentHashMap is thread safe
                for (GlobalSession session : sessions) {
                    sessionManager.removeGlobalSession(session);
                }
            }
            for (GlobalSession globalSession : globalSessions) {
                globalSession.begin();
            }

            final GlobalSessionParam globalSessionParam = new GlobalSessionParam();
            // wrong pageSize or pageNum
            Assertions.assertThrows(
                IllegalArgumentException.class, () -> globalSessionService.query(globalSessionParam));

            // page
            globalSessionParam.setPageSize(1);
            globalSessionParam.setPageNum(1);
            final PageResult<GlobalSessionVO> sizeAndNumTestResult = globalSessionService.query(globalSessionParam);
            Assertions.assertEquals(1, sizeAndNumTestResult.getCurrPage());
            Assertions.assertEquals(3, sizeAndNumTestResult.getPages());
            Assertions.assertEquals(1, sizeAndNumTestResult.getData().size());
            Assertions.assertEquals(3, sizeAndNumTestResult.getTotal());

            // xid
            final GlobalSession firstGlobalSession = globalSessions.get(0);
            globalSessionParam.setXid(firstGlobalSession.getXid());
            final PageResult<GlobalSessionVO> xidTestResult = globalSessionService.query(globalSessionParam);
            Assertions.assertEquals(1, xidTestResult.getData().size());
            Assertions.assertEquals(
                globalSessionParam.getXid(), xidTestResult.getData().get(0).getXid());

            // transaction name
            globalSessionParam.setXid(null);
            globalSessionParam.setTransactionName("test2");
            final PageResult<GlobalSessionVO> transactionNameTestResult =
                globalSessionService.query(globalSessionParam);
            Assertions.assertEquals(1, transactionNameTestResult.getData().size());
            Assertions.assertEquals(
                globalSessionParam.getTransactionName(),
                transactionNameTestResult.getData().get(0).getTransactionName());

            // application id
            globalSessionParam.setPageSize(3);
            globalSessionParam.setTransactionName(null);
            globalSessionParam.setApplicationId("demo-app");
            final PageResult<GlobalSessionVO> applicationIdTestResult = globalSessionService.query(globalSessionParam);
            Assertions.assertEquals(2, applicationIdTestResult.getData().size());
            Assertions.assertEquals(
                globalSessionParam.getApplicationId(),
                applicationIdTestResult.getData().stream()
                    .map(GlobalSessionVO::getApplicationId)
                    .distinct()
                    .reduce(String::concat)
                    .orElse(""));

            // status
            globalSessionParam.setApplicationId(null);
            globalSessionParam.setWithBranch(true);
            globalSessionParam.setStatus(GlobalStatus.CommitFailed.getCode());
            final PageResult<GlobalSessionVO> statusTestResult = globalSessionService.query(globalSessionParam);
            Assertions.assertEquals(0, statusTestResult.getData().size());

            // with branch
            globalSessionParam.setStatus(null);
            final PageResult<GlobalSessionVO> withBranchTestResult = globalSessionService.query(globalSessionParam);
            Assertions.assertEquals(3, withBranchTestResult.getData().size());

            // timeStart and timeEnd
            globalSessionParam.setWithBranch(false);
            Assertions.assertEquals(
                3, globalSessionService.query(globalSessionParam).getData().size());
            globalSessionParam.setTimeStart(DateUtils.addHours(new Date(), 1).getTime());
            Assertions.assertEquals(
                3, globalSessionService.query(globalSessionParam).getData().size());
            globalSessionParam.setTimeStart(DateUtils.addHours(new Date(), -1).getTime());
            Assertions.assertEquals(
                0, globalSessionService.query(globalSessionParam).getData().size());
            globalSessionParam.setTimeStart(null);
            Assertions.assertEquals(
                3, globalSessionService.query(globalSessionParam).getData().size());
            globalSessionParam.setTimeEnd(DateUtils.addHours(new Date(), 1).getTime());
            Assertions.assertEquals(
                0, globalSessionService.query(globalSessionParam).getData().size());
            globalSessionParam.setTimeStart(DateUtils.addHours(new Date(), -1).getTime());
            Assertions.assertEquals(
                0, globalSessionService.query(globalSessionParam).getData().size());
        } finally {
            for (GlobalSession globalSession : globalSessions) {
                globalSession.end();
            }
            SessionHolder.destroy();
        }
    }

    /**
     * On begin test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void onBeginTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.onBegin(globalSession);
            sessionManager.onSuccessEnd(globalSession);
        }
    }

    /**
     * On status change test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void onStatusChangeTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.onBegin(globalSession);
            sessionManager.onStatusChange(globalSession, GlobalStatus.Finished);
            sessionManager.onSuccessEnd(globalSession);
        }
    }

    /**
     * Verifies stopping retry on a global transaction: rejected while in Begin status,
     * and mapped to the matching Stop* status when commit/rollback retrying.
     *
     * @param globalSessions the global sessions
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionForLockTestProvider")
    public void stopGlobalSessionTest(List<GlobalSession> globalSessions) throws Exception {
        try {
            for (GlobalSession globalSession : globalSessions) {
                globalSession.begin();
            }
            // A session still in Begin status cannot have its retry stopped.
            Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> globalSessionService.stopGlobalRetry(
                    globalSessions.get(0).getXid()));
            GlobalSession globalSession = globalSessions.get(1);
            globalSession.changeGlobalStatus(GlobalStatus.CommitRetrying);
            String xid = globalSession.getXid();
            globalSessionService.stopGlobalRetry(xid);
            Assertions.assertEquals(
                GlobalStatus.StopCommitOrCommitRetry, SessionHolder.findGlobalSession(xid).getStatus());
            globalSession.changeGlobalStatus(GlobalStatus.RollbackRetrying);
            globalSessionService.stopGlobalRetry(xid);
            Assertions.assertEquals(
                GlobalStatus.StopRollbackOrRollbackRetry, SessionHolder.findGlobalSession(xid).getStatus());
        } finally {
            for (GlobalSession globalSession : globalSessions) {
                globalSession.setStatus(GlobalStatus.Committed);
                globalSession.end();
            }
        }
    }

    /**
     * Verifies that changing the global status of a session in Begin status is rejected
     * and leaves the status untouched.
     *
     * @param globalSessions the global sessions
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionForLockTestProvider")
    public void changeGlobalSessionTest(List<GlobalSession> globalSessions) throws Exception {
        try {
            for (GlobalSession globalSession : globalSessions) {
                globalSession.begin();
            }
            Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> globalSessionService.changeGlobalStatus(
                    globalSessions.get(0).getXid()));
            Assertions.assertEquals(GlobalStatus.Begin, globalSessions.get(0).getStatus());
            // TODO: After implementing robust support for concurrent multi-module tests, add tests to verify that
            // globalSession transitions to FAIL_COMMIT_STATUS and FAIL_ROLLBACK_STATUS.
        } finally {
            for (GlobalSession globalSession : globalSessions) {
                globalSession.setStatus(GlobalStatus.Committed);
                globalSession.end();
            }
        }
    }

    /**
     * Verifies restarting retry on a stopped global transaction: rejected for sessions
     * not in a Stop* status, and mapped back to the matching *Retrying status otherwise.
     *
     * @param globalSessions the global sessions
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionForLockTestProvider")
    public void startGlobalSessionTest(List<GlobalSession> globalSessions) throws Exception {
        try {
            for (GlobalSession globalSession : globalSessions) {
                globalSession.begin();
            }
            Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> globalSessionService.startGlobalRetry(
                    globalSessions.get(0).getXid()));
            GlobalSession globalSession = globalSessions.get(1);
            globalSession.setStatus(GlobalStatus.StopCommitOrCommitRetry);
            String xid = globalSession.getXid();
            globalSessionService.startGlobalRetry(xid);
            Assertions.assertEquals(GlobalStatus.CommitRetrying, SessionHolder.findGlobalSession(xid).getStatus());
            globalSession.setStatus(GlobalStatus.StopRollbackOrRollbackRetry);
            globalSessionService.startGlobalRetry(xid);
            Assertions.assertEquals(GlobalStatus.RollbackRetrying, SessionHolder.findGlobalSession(xid).getStatus());
        } finally {
            for (GlobalSession globalSession : globalSessions) {
                globalSession.setStatus(GlobalStatus.Committed);
                globalSession.end();
            }
        }
    }

    /**
     * On branch status change test.
     *
     * @param globalSession the global session
     * @param branchSession the branch session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionProvider")
    public void onBranchStatusChangeTest(GlobalSession globalSession, BranchSession branchSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.onBegin(globalSession);
            sessionManager.onAddBranch(globalSession, branchSession);
            sessionManager.onBranchStatusChange(globalSession, branchSession, BranchStatus.PhaseTwo_Committed);
            sessionManager.onSuccessEnd(globalSession);
        }
    }

    /**
     * On add branch test.
     *
     * @param globalSession the global session
     * @param branchSession the branch session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionProvider")
    public void onAddBranchTest(GlobalSession globalSession, BranchSession branchSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.onBegin(globalSession);
            sessionManager.onAddBranch(globalSession, branchSession);
            sessionManager.onSuccessEnd(globalSession);
        }
    }

    /**
     * On remove branch test.
     *
     * @param globalSession the global session
     * @param branchSession the branch session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionProvider")
    public void onRemoveBranchTest(GlobalSession globalSession, BranchSession branchSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.onBegin(globalSession);
            sessionManager.onAddBranch(globalSession, branchSession);
            sessionManager.onRemoveBranch(globalSession, branchSession);
            sessionManager.onSuccessEnd(globalSession);
        }
    }

    /**
     * On close test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void onCloseTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.onBegin(globalSession);
            sessionManager.onClose(globalSession);
            sessionManager.onSuccessEnd(globalSession);
        }
    }

    /**
     * Verifies stopping retry on a single branch transaction, including parameter and
     * status validation for both the branch and its enclosing global transaction.
     *
     * @param globalSession the global session holding the branches under test
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionsProvider")
    public void stopBranchRetryTest(GlobalSession globalSession) throws Exception {
        try {
            globalSession.begin();
            // wrong param for xid and branchId
            Assertions.assertThrows(
                IllegalArgumentException.class, () -> branchSessionService.stopBranchRetry("xid", null));
            Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> branchSessionService.stopBranchRetry(globalSession.getXid(), "test"));
            // wrong status for branch transaction
            List<BranchSession> branchSessions = globalSession.getBranchSessions();
            Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> branchSessionService.stopBranchRetry(
                    globalSession.getXid(),
                    String.valueOf(branchSessions.get(0).getBranchId())));
            // wrong status for global transaction
            globalSession.setStatus(GlobalStatus.Begin);
            Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> branchSessionService.stopBranchRetry(
                    globalSession.getXid(),
                    String.valueOf(branchSessions.get(1).getBranchId())));
            // success stop
            globalSession.setStatus(GlobalStatus.CommitRetrying);
            branchSessionService.stopBranchRetry(
                globalSession.getXid(), String.valueOf(branchSessions.get(1).getBranchId()));
            GlobalSession newGlobalSession = SessionHolder.findGlobalSession(globalSession.getXid());
            Assertions.assertEquals(
                BranchStatus.STOP_RETRY,
                newGlobalSession.getBranchSessions().get(1).getStatus());
        } finally {
            globalSession.setStatus(GlobalStatus.Committed);
            globalSession.end();
        }
    }

    /**
     * Verifies restarting retry on a stopped branch transaction: rejected for a branch
     * not in STOP_RETRY status, and restored to Registered on success.
     *
     * @param globalSession the global session holding the branches under test
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("branchSessionsProvider")
    public void restartBranchFailRetryTest(GlobalSession globalSession) throws Exception {
        try {
            globalSession.begin();
            List<BranchSession> branchSessions = globalSession.getBranchSessions();
            // wrong status for branch transaction
            Assertions.assertThrows(
                IllegalArgumentException.class,
                () -> branchSessionService.startBranchRetry(
                    globalSession.getXid(),
                    String.valueOf(branchSessions.get(0).getBranchId())));
            // success
            branchSessionService.startBranchRetry(
                globalSession.getXid(), String.valueOf(branchSessions.get(2).getBranchId()));
            GlobalSession newGlobalSession = SessionHolder.findGlobalSession(globalSession.getXid());
            Assertions.assertEquals(
                BranchStatus.Registered,
                newGlobalSession.getBranchSessions().get(2).getStatus());
        } finally {
            globalSession.setStatus(GlobalStatus.Committed);
            globalSession.end();
        }
    }

    /**
     * On end test.
     *
     * @param globalSession the global session
     * @throws Exception the exception
     */
    @ParameterizedTest
    @MethodSource("globalSessionProvider")
    public void onEndTest(GlobalSession globalSession) throws Exception {
        for (SessionManager sessionManager : sessionManagerList) {
            sessionManager.onBegin(globalSession);
            sessionManager.onSuccessEnd(globalSession);
        }
    }

    /**
     * Provides a single global session with a generated XID.
     *
     * @return arguments holding one {@link GlobalSession}
     */
    static Stream<Arguments> globalSessionProvider() {
        GlobalSession globalSession = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test", 6000);
        String xid = XID.generateXID(globalSession.getTransactionId());
        globalSession.setXid(xid);
        return Stream.of(Arguments.of(globalSession));
    }

    /**
     * Provides a list of two plain global sessions.
     *
     * @return arguments holding a list of {@link GlobalSession}
     */
    static Stream<Arguments> globalSessionsProvider() {
        GlobalSession globalSession1 = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test", 6000);
        GlobalSession globalSession2 = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test", 6000);
        return Stream.of(Arguments.of(Arrays.asList(globalSession1, globalSession2)));
    }

    /**
     * Provides three global sessions (one carrying a branch session) with far-future begin
     * times, for paging/filter queries.
     *
     * @return arguments holding a list of {@link GlobalSession}
     */
    static Stream<Arguments> globalSessionsWithPageResultProvider() throws ParseException {
        final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        GlobalSession globalSession1 = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test1", 60000);
        globalSession1.setBeginTime(dateFormat.parse("2220-1-1 08:02:00").getTime());
        GlobalSession globalSession2 = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test2", 60000);
        globalSession2.setBeginTime(dateFormat.parse("2220-1-1 08:04:00").getTime());
        GlobalSession globalSession3 = new GlobalSession("with-branchSession-app", DEFAULT_TX_GROUP, "test3", 60000);
        globalSession3.setBeginTime(dateFormat.parse("2220-1-1 08:20:00").getTime());
        globalSession3.setStatus(GlobalStatus.CommitFailed);

        final BranchSession branchSession = new BranchSession();
        branchSession.setApplicationData("applicationData");
        branchSession.setResourceGroupId("applicationData");
        branchSession.setClientId("clientId");
        branchSession.setResourceId("resourceId");
        branchSession.setLockKey("lockKey");
        branchSession.setBranchType(BranchType.AT);
        branchSession.setStatus(BranchStatus.Registered);
        branchSession.setTransactionId(11L);
        branchSession.setBranchId(22L);
        branchSession.setXid("xid");
        branchSession.setLockStatus(LockStatus.Locked);
        globalSession3.add(branchSession);
        return Stream.of(Arguments.of(Arrays.asList(globalSession1, globalSession2, globalSession3)));
    }

    /**
     * Provides two global sessions, each holding one AT branch with row locks, for the
     * stop/start retry tests.
     *
     * @return arguments holding a list of {@link GlobalSession}
     */
    static Stream<Arguments> globalSessionForLockTestProvider() throws ParseException {
        BranchSession branchSession1 = new BranchSession();
        branchSession1.setTransactionId(UUIDGenerator.generateUUID());
        branchSession1.setBranchId(1L);
        branchSession1.setClientId("c1");
        branchSession1.setResourceGroupId(DEFAULT_TX_GROUP);
        branchSession1.setResourceId("department");
        branchSession1.setLockKey("a:1,2");
        branchSession1.setBranchType(BranchType.AT);
        branchSession1.setApplicationData("{\"data\":\"test\"}");
        branchSession1.setBranchType(BranchType.AT);

        BranchSession branchSession2 = new BranchSession();
        branchSession2.setTransactionId(UUIDGenerator.generateUUID());
        branchSession2.setBranchId(2L);
        branchSession2.setClientId("c1");
        branchSession2.setResourceGroupId(DEFAULT_TX_GROUP);
        branchSession2.setResourceId("department");
        branchSession2.setLockKey("e:3,4");
        branchSession2.setBranchType(BranchType.AT);
        branchSession2.setApplicationData("{\"data\":\"test\"}");
        branchSession2.setBranchType(BranchType.AT);

        branchSession1.setTransactionId(397215L);
        branchSession2.setTransactionId(92482L);

        final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        GlobalSession globalSession1 = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test1", 6000);
        globalSession1.setXid("xid1");
        globalSession1.add(branchSession1);
        globalSession1.setBeginTime(dateFormat.parse("2022-1-1 03:00:00").getTime());

        GlobalSession globalSession2 = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test2", 6000);
        globalSession2.setXid("ddd1");
        globalSession2.add(branchSession2);
        globalSession2.setBeginTime(dateFormat.parse("2022-1-1 08:00:00").getTime());
        return Stream.of(Arguments.of(Arrays.asList(globalSession1, globalSession2)));
    }

    /**
     * Provides a global session together with a single matching AT branch session.
     *
     * @return arguments holding a {@link GlobalSession} and a {@link BranchSession}
     */
    static Stream<Arguments> branchSessionProvider() {
        GlobalSession globalSession = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test", 6000);
        globalSession.setXid(XID.generateXID(globalSession.getTransactionId()));
        BranchSession branchSession = new BranchSession();
        branchSession.setTransactionId(globalSession.getTransactionId());
        branchSession.setBranchId(1L);
        branchSession.setResourceGroupId(DEFAULT_TX_GROUP);
        branchSession.setResourceId("tb_1");
        branchSession.setLockKey("t_1");
        branchSession.setBranchType(BranchType.AT);
        branchSession.setApplicationData("{\"data\":\"test\"}");
        return Stream.of(Arguments.of(globalSession, branchSession));
    }

    /**
     * Provides a CommitRetrying global session holding three branches in PhaseOne_Failed,
     * Registered and STOP_RETRY statuses, for the branch stop/start retry tests.
     *
     * @return arguments holding a {@link GlobalSession}
     */
    static Stream<Arguments> branchSessionsProvider() {
        GlobalSession globalSession = new GlobalSession("demo-app", DEFAULT_TX_GROUP, "test", 6000);
        globalSession.setXid(XID.generateXID(globalSession.getTransactionId()));
        globalSession.setStatus(GlobalStatus.CommitRetrying);

        BranchSession branchSession = new BranchSession();
        branchSession.setBranchId(1L);
        branchSession.setXid(globalSession.getXid());
        branchSession.setResourceGroupId(DEFAULT_TX_GROUP);
        branchSession.setStatus(BranchStatus.PhaseOne_Failed);
        branchSession.setBranchType(BranchType.AT);

        BranchSession branchSession1 = new BranchSession();
        branchSession1.setBranchId(2L);
        branchSession1.setXid(globalSession.getXid());
        branchSession1.setResourceGroupId(DEFAULT_TX_GROUP);
        branchSession1.setStatus(BranchStatus.Registered);
        branchSession1.setBranchType(BranchType.AT);

        BranchSession branchSession2 = new BranchSession();
        branchSession2.setBranchId(3L);
        branchSession2.setXid(globalSession.getXid());
        branchSession2.setResourceGroupId(DEFAULT_TX_GROUP);
        branchSession2.setStatus(BranchStatus.STOP_RETRY);
        branchSession2.setBranchType(BranchType.AT);

        globalSession.add(branchSession);
        globalSession.add(branchSession1);
        globalSession.add(branchSession2);
        return Stream.of(Arguments.of(globalSession));
    }
}
|
apache/james-project | 36,024 | backends-common/rabbitmq/src/main/java/org/apache/james/backends/rabbitmq/ReactorRabbitMQChannelPool.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.backends.rabbitmq;
import java.io.IOException;
import java.time.Duration;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import jakarta.annotation.PreDestroy;
import org.apache.james.lifecycle.api.Startable;
import org.apache.james.metrics.api.GaugeRegistry;
import org.apache.james.metrics.api.MetricFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.fge.lambdas.Throwing;
import com.google.common.base.Preconditions;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.BuiltinExchangeType;
import com.rabbitmq.client.CancelCallback;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.Command;
import com.rabbitmq.client.ConfirmCallback;
import com.rabbitmq.client.ConfirmListener;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Consumer;
import com.rabbitmq.client.ConsumerShutdownSignalCallback;
import com.rabbitmq.client.DeliverCallback;
import com.rabbitmq.client.GetResponse;
import com.rabbitmq.client.Method;
import com.rabbitmq.client.ReturnCallback;
import com.rabbitmq.client.ReturnListener;
import com.rabbitmq.client.ShutdownListener;
import com.rabbitmq.client.ShutdownSignalException;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import reactor.core.publisher.SignalType;
import reactor.core.scheduler.Schedulers;
import reactor.pool.InstrumentedPool;
import reactor.pool.PoolBuilder;
import reactor.pool.PooledRef;
import reactor.rabbitmq.BindingSpecification;
import reactor.rabbitmq.ChannelPool;
import reactor.rabbitmq.QueueSpecification;
import reactor.rabbitmq.RabbitFlux;
import reactor.rabbitmq.Receiver;
import reactor.rabbitmq.ReceiverOptions;
import reactor.rabbitmq.Sender;
import reactor.rabbitmq.SenderOptions;
import reactor.util.retry.Retry;
import reactor.util.retry.RetryBackoffSpec;
public class ReactorRabbitMQChannelPool implements ChannelPool, Startable {
/**
 * {@link Channel} decorator ensuring publisher confirms are enabled at most once per
 * channel: the first {@code confirmSelect()} reaches the broker, later calls short-circuit
 * with a synthetic {@code SelectOk}. Every other operation is a straight delegation, with
 * close codes >= 300 (errors per AMQP 0-9-1) logged on close/abort.
 */
private static class SelectOnceChannel implements Channel {
    private final Channel delegate;
    // Set on the first confirmSelect(); guards against redundant broker round-trips.
    private final AtomicBoolean confirmSelected = new AtomicBoolean(false);

    private SelectOnceChannel(Channel delegate) {
        this.delegate = delegate;
    }

    @Override
    public AMQP.Confirm.SelectOk confirmSelect() throws IOException {
        if (!confirmSelected.getAndSet(true)) {
            return delegate.confirmSelect();
        }
        return new AMQP.Confirm.SelectOk.Builder().build();
    }

    @Override
    public int getChannelNumber() {
        return delegate.getChannelNumber();
    }

    @Override
    public Connection getConnection() {
        return delegate.getConnection();
    }

    @Override
    public void close() throws IOException, TimeoutException {
        delegate.close();
    }

    @Override
    public void close(int closeCode, String closeMessage) throws IOException, TimeoutException {
        // https://www.rabbitmq.com/amqp-0-9-1-reference.html#domain.reply-code
        if (closeCode >= 300) {
            LOGGER.warn("Closing channel {} code:{} message:'{}'", getChannelNumber(), closeCode, closeMessage);
        }
        delegate.close(closeCode, closeMessage);
    }

    @Override
    public void abort() throws IOException {
        delegate.abort();
    }

    @Override
    public void abort(int closeCode, String closeMessage) throws IOException {
        // https://www.rabbitmq.com/amqp-0-9-1-reference.html#domain.reply-code
        if (closeCode >= 300) {
            LOGGER.warn("Closing channel {} code:{} message:'{}'", getChannelNumber(), closeCode, closeMessage);
        }
        delegate.abort(closeCode, closeMessage);
    }

    @Override
    public void addReturnListener(ReturnListener listener) {
        // Fixed: previously called removeReturnListener, so listeners were never
        // registered and returned (unroutable) messages went unnoticed.
        delegate.addReturnListener(listener);
    }

    @Override
    public ReturnListener addReturnListener(ReturnCallback returnCallback) {
        return delegate.addReturnListener(returnCallback);
    }

    @Override
    public boolean removeReturnListener(ReturnListener listener) {
        return delegate.removeReturnListener(listener);
    }

    @Override
    public void clearReturnListeners() {
        delegate.clearReturnListeners();
    }

    @Override
    public void addConfirmListener(ConfirmListener listener) {
        delegate.addConfirmListener(listener);
    }

    @Override
    public ConfirmListener addConfirmListener(ConfirmCallback ackCallback, ConfirmCallback nackCallback) {
        return delegate.addConfirmListener(ackCallback, nackCallback);
    }

    @Override
    public boolean removeConfirmListener(ConfirmListener listener) {
        return delegate.removeConfirmListener(listener);
    }

    @Override
    public void clearConfirmListeners() {
        delegate.clearConfirmListeners();
    }

    @Override
    public Consumer getDefaultConsumer() {
        return delegate.getDefaultConsumer();
    }

    @Override
    public void setDefaultConsumer(Consumer consumer) {
        delegate.setDefaultConsumer(consumer);
    }

    @Override
    public void basicQos(int prefetchSize, int prefetchCount, boolean global) throws IOException {
        delegate.basicQos(prefetchSize, prefetchCount, global);
    }

    @Override
    public void basicQos(int prefetchCount, boolean global) throws IOException {
        delegate.basicQos(prefetchCount, global);
    }

    @Override
    public void basicQos(int prefetchCount) throws IOException {
        delegate.basicQos(prefetchCount);
    }

    @Override
    public void basicPublish(String exchange, String routingKey, AMQP.BasicProperties props, byte[] body) throws IOException {
        delegate.basicPublish(exchange, routingKey, props, body);
    }

    @Override
    public void basicPublish(String exchange, String routingKey, boolean mandatory, AMQP.BasicProperties props, byte[] body) throws IOException {
        delegate.basicPublish(exchange, routingKey, mandatory, props, body);
    }

    @Override
    public void basicPublish(String exchange, String routingKey, boolean mandatory, boolean immediate, AMQP.BasicProperties props, byte[] body) throws IOException {
        delegate.basicPublish(exchange, routingKey, mandatory, immediate, props, body);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, String type) throws IOException {
        return delegate.exchangeDeclare(exchange, type);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type) throws IOException {
        return delegate.exchangeDeclare(exchange, type);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, String type, boolean durable) throws IOException {
        return delegate.exchangeDeclare(exchange, type, durable);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type, boolean durable) throws IOException {
        return delegate.exchangeDeclare(exchange, type, durable);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, String type, boolean durable, boolean autoDelete, Map<String, Object> arguments) throws IOException {
        return delegate.exchangeDeclare(exchange, type, durable, autoDelete, arguments);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type, boolean durable, boolean autoDelete, Map<String, Object> arguments) throws IOException {
        return delegate.exchangeDeclare(exchange, type, durable, autoDelete, arguments);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, String type, boolean durable, boolean autoDelete, boolean internal, Map<String, Object> arguments) throws IOException {
        return delegate.exchangeDeclare(exchange, type, durable, autoDelete, internal, arguments);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclare(String exchange, BuiltinExchangeType type, boolean durable, boolean autoDelete, boolean internal, Map<String, Object> arguments) throws IOException {
        return delegate.exchangeDeclare(exchange, type, durable, autoDelete, internal, arguments);
    }

    @Override
    public void exchangeDeclareNoWait(String exchange, String type, boolean durable, boolean autoDelete, boolean internal, Map<String, Object> arguments) throws IOException {
        delegate.exchangeDeclareNoWait(exchange, type, durable, autoDelete, internal, arguments);
    }

    @Override
    public void exchangeDeclareNoWait(String exchange, BuiltinExchangeType type, boolean durable, boolean autoDelete, boolean internal, Map<String, Object> arguments) throws IOException {
        // Fixed: previously delegated to the blocking exchangeDeclare variant, silently
        // waiting for a broker reply on a fire-and-forget call.
        delegate.exchangeDeclareNoWait(exchange, type, durable, autoDelete, internal, arguments);
    }

    @Override
    public AMQP.Exchange.DeclareOk exchangeDeclarePassive(String name) throws IOException {
        return delegate.exchangeDeclarePassive(name);
    }

    @Override
    public AMQP.Exchange.DeleteOk exchangeDelete(String exchange, boolean ifUnused) throws IOException {
        return delegate.exchangeDelete(exchange, ifUnused);
    }

    @Override
    public void exchangeDeleteNoWait(String exchange, boolean ifUnused) throws IOException {
        delegate.exchangeDeleteNoWait(exchange, ifUnused);
    }

    @Override
    public AMQP.Exchange.DeleteOk exchangeDelete(String exchange) throws IOException {
        return delegate.exchangeDelete(exchange);
    }

    @Override
    public AMQP.Exchange.BindOk exchangeBind(String destination, String source, String routingKey) throws IOException {
        return delegate.exchangeBind(destination, source, routingKey);
    }

    @Override
    public AMQP.Exchange.BindOk exchangeBind(String destination, String source, String routingKey, Map<String, Object> arguments) throws IOException {
        return delegate.exchangeBind(destination, source, routingKey, arguments);
    }

    @Override
    public void exchangeBindNoWait(String destination, String source, String routingKey, Map<String, Object> arguments) throws IOException {
        delegate.exchangeBindNoWait(destination, source, routingKey, arguments);
    }

    @Override
    public AMQP.Exchange.UnbindOk exchangeUnbind(String destination, String source, String routingKey) throws IOException {
        return delegate.exchangeUnbind(destination, source, routingKey);
    }

    @Override
    public AMQP.Exchange.UnbindOk exchangeUnbind(String destination, String source, String routingKey, Map<String, Object> arguments) throws IOException {
        return delegate.exchangeUnbind(destination, source, routingKey, arguments);
    }

    @Override
    public void exchangeUnbindNoWait(String destination, String source, String routingKey, Map<String, Object> arguments) throws IOException {
        // Fixed: previously delegated to exchangeBindNoWait, re-creating the binding
        // instead of removing it.
        delegate.exchangeUnbindNoWait(destination, source, routingKey, arguments);
    }

    @Override
    public AMQP.Queue.DeclareOk queueDeclare() throws IOException {
        return delegate.queueDeclare();
    }

    @Override
    public AMQP.Queue.DeclareOk queueDeclare(String queue, boolean durable, boolean exclusive, boolean autoDelete, Map<String, Object> arguments) throws IOException {
        return delegate.queueDeclare(queue, durable, exclusive, autoDelete, arguments);
    }

    @Override
    public void queueDeclareNoWait(String queue, boolean durable, boolean exclusive, boolean autoDelete, Map<String, Object> arguments) throws IOException {
        delegate.queueDeclareNoWait(queue, durable, exclusive, autoDelete, arguments);
    }

    @Override
    public AMQP.Queue.DeclareOk queueDeclarePassive(String queue) throws IOException {
        return delegate.queueDeclarePassive(queue);
    }

    @Override
    public AMQP.Queue.DeleteOk queueDelete(String queue) throws IOException {
        return delegate.queueDelete(queue);
    }

    @Override
    public AMQP.Queue.DeleteOk queueDelete(String queue, boolean ifUnused, boolean ifEmpty) throws IOException {
        return delegate.queueDelete(queue, ifUnused, ifEmpty);
    }

    @Override
    public void queueDeleteNoWait(String queue, boolean ifUnused, boolean ifEmpty) throws IOException {
        delegate.queueDeleteNoWait(queue, ifUnused, ifEmpty);
    }

    @Override
    public AMQP.Queue.BindOk queueBind(String queue, String exchange, String routingKey) throws IOException {
        return delegate.queueBind(queue, exchange, routingKey);
    }

    @Override
    public AMQP.Queue.BindOk queueBind(String queue, String exchange, String routingKey, Map<String, Object> arguments) throws IOException {
        return delegate.queueBind(queue, exchange, routingKey, arguments);
    }

    @Override
    public void queueBindNoWait(String queue, String exchange, String routingKey, Map<String, Object> arguments) throws IOException {
        delegate.queueBindNoWait(queue, exchange, routingKey, arguments);
    }

    @Override
    public AMQP.Queue.UnbindOk queueUnbind(String queue, String exchange, String routingKey) throws IOException {
        return delegate.queueUnbind(queue, exchange, routingKey);
    }

    @Override
    public AMQP.Queue.UnbindOk queueUnbind(String queue, String exchange, String routingKey, Map<String, Object> arguments) throws IOException {
        return delegate.queueUnbind(queue, exchange, routingKey, arguments);
    }

    @Override
    public AMQP.Queue.PurgeOk queuePurge(String queue) throws IOException {
        return delegate.queuePurge(queue);
    }

    @Override
    public GetResponse basicGet(String queue, boolean autoAck) throws IOException {
        return delegate.basicGet(queue, autoAck);
    }

    @Override
    public void basicAck(long deliveryTag, boolean multiple) throws IOException {
        delegate.basicAck(deliveryTag, multiple);
    }

    @Override
    public void basicNack(long deliveryTag, boolean multiple, boolean requeue) throws IOException {
        delegate.basicNack(deliveryTag, multiple, requeue);
    }

    @Override
    public void basicReject(long deliveryTag, boolean requeue) throws IOException {
        delegate.basicReject(deliveryTag, requeue);
    }

    @Override
    public String basicConsume(String queue, Consumer callback) throws IOException {
        return delegate.basicConsume(queue, callback);
    }

    @Override
    public String basicConsume(String queue, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        return delegate.basicConsume(queue, deliverCallback, cancelCallback);
    }

    @Override
    public String basicConsume(String queue, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, deliverCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, deliverCallback, cancelCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, Consumer callback) throws IOException {
        return delegate.basicConsume(queue, autoAck, callback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, deliverCallback, cancelCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, deliverCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, deliverCallback, cancelCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, Consumer callback) throws IOException {
        return delegate.basicConsume(queue, autoAck, arguments, callback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, arguments, deliverCallback, cancelCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, arguments, deliverCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, Map<String, Object> arguments, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, arguments, deliverCallback, cancelCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, Consumer callback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, callback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, deliverCallback, cancelCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, deliverCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, deliverCallback, cancelCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive, Map<String, Object> arguments, Consumer callback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, noLocal, exclusive, arguments, callback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive, Map<String, Object> arguments, DeliverCallback deliverCallback, CancelCallback cancelCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, noLocal, exclusive, arguments, deliverCallback, cancelCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive, Map<String, Object> arguments, DeliverCallback deliverCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, noLocal, exclusive, arguments, deliverCallback, shutdownSignalCallback);
    }

    @Override
    public String basicConsume(String queue, boolean autoAck, String consumerTag, boolean noLocal, boolean exclusive, Map<String, Object> arguments, DeliverCallback deliverCallback, CancelCallback cancelCallback, ConsumerShutdownSignalCallback shutdownSignalCallback) throws IOException {
        return delegate.basicConsume(queue, autoAck, consumerTag, noLocal, exclusive, arguments, deliverCallback, cancelCallback, shutdownSignalCallback);
    }

    @Override
    public void basicCancel(String consumerTag) throws IOException {
        delegate.basicCancel(consumerTag);
    }

    @Override
    public AMQP.Basic.RecoverOk basicRecover() throws IOException {
        return delegate.basicRecover();
    }

    @Override
    public AMQP.Basic.RecoverOk basicRecover(boolean requeue) throws IOException {
        return delegate.basicRecover(requeue);
    }

    @Override
    public AMQP.Tx.SelectOk txSelect() throws IOException {
        return delegate.txSelect();
    }

    @Override
    public AMQP.Tx.CommitOk txCommit() throws IOException {
        return delegate.txCommit();
    }

    @Override
    public AMQP.Tx.RollbackOk txRollback() throws IOException {
        return delegate.txRollback();
    }

    @Override
    public long getNextPublishSeqNo() {
        return delegate.getNextPublishSeqNo();
    }

    @Override
    public boolean waitForConfirms() throws InterruptedException {
        return delegate.waitForConfirms();
    }

    @Override
    public boolean waitForConfirms(long timeout) throws InterruptedException, TimeoutException {
        return delegate.waitForConfirms(timeout);
    }

    @Override
    public void waitForConfirmsOrDie() throws IOException, InterruptedException {
        delegate.waitForConfirmsOrDie();
    }

    @Override
    public void waitForConfirmsOrDie(long timeout) throws IOException, InterruptedException, TimeoutException {
        // Fixed: previously delegated to waitForConfirms(timeout), discarding the result
        // and losing the "or die" contract (IOException + channel closure on nack).
        delegate.waitForConfirmsOrDie(timeout);
    }

    @Override
    public void asyncRpc(Method method) throws IOException {
        delegate.asyncRpc(method);
    }

    @Override
    public Command rpc(Method method) throws IOException {
        return delegate.rpc(method);
    }

    @Override
    public long messageCount(String queue) throws IOException {
        return delegate.messageCount(queue);
    }

    @Override
    public long consumerCount(String queue) throws IOException {
        return delegate.consumerCount(queue);
    }

    @Override
    public CompletableFuture<Command> asyncCompletableRpc(Method method) throws IOException {
        return delegate.asyncCompletableRpc(method);
    }

    @Override
    public void addShutdownListener(ShutdownListener listener) {
        delegate.addShutdownListener(listener);
    }

    @Override
    public void removeShutdownListener(ShutdownListener listener) {
        delegate.removeShutdownListener(listener);
    }

    @Override
    public ShutdownSignalException getCloseReason() {
        return delegate.getCloseReason();
    }

    @Override
    public void notifyListeners() {
        delegate.notifyListeners();
    }

    @Override
    public boolean isOpen() {
        return delegate.isOpen();
    }
}
/**
 * Tuning knobs for the channel pool: how many channels to keep, how long a borrower may
 * wait, and how often channel creation is retried. Built through a staged builder so every
 * mandatory value is supplied exactly once, or loaded from a commons-configuration source.
 */
public static class Configuration {
    @FunctionalInterface
    public interface RequiresRetries {
        RequiredMaxBorrowDelay retries(int retries);
    }

    @FunctionalInterface
    public interface RequiredMaxBorrowDelay {
        RequiredMaxChannel maxBorrowDelay(Duration maxBorrowDelay);
    }

    @FunctionalInterface
    public interface RequiredMaxChannel {
        Configuration maxChannel(int maxChannel);
    }

    public static final Configuration DEFAULT = builder()
        .retries(MAX_BORROW_RETRIES)
        .maxBorrowDelay(MAX_BORROW_DELAY)
        .maxChannel(MAX_CHANNELS_NUMBER);

    // Curried staged builder: each stage captures one mandatory value.
    public static RequiresRetries builder() {
        return retryCount -> borrowDelay -> channelCount -> new Configuration(borrowDelay, retryCount, channelCount);
    }

    public static Configuration from(org.apache.commons.configuration2.Configuration configuration) {
        // Each property falls back to the compile-time default when unset.
        int retryCount = configuration.getInt("channel.pool.retries", MAX_BORROW_RETRIES);
        int poolSize = configuration.getInt("channel.pool.size", MAX_CHANNELS_NUMBER);
        Duration borrowDelay = Optional.ofNullable(configuration.getLong("channel.pool.max.delay.ms", null))
            .map(Duration::ofMillis)
            .orElse(MAX_BORROW_DELAY);
        return builder()
            .retries(retryCount)
            .maxBorrowDelay(borrowDelay)
            .maxChannel(poolSize);
    }

    private final Duration maxBorrowDelay;
    private final int retries;
    private final int maxChannel;

    public Configuration(Duration maxBorrowDelay, int retries, int maxChannel) {
        this.maxBorrowDelay = maxBorrowDelay;
        this.retries = retries;
        this.maxChannel = maxChannel;
    }

    // Exponential backoff applied when opening a new channel fails.
    private RetryBackoffSpec backoffSpec() {
        return Retry.backoff(retries, maxBorrowDelay);
    }

    public int getMaxChannel() {
        return maxChannel;
    }
}
private static final Logger LOGGER = LoggerFactory.getLogger(ReactorRabbitMQChannelPool.class);
// Pool sizing/backoff defaults; overridable through Configuration.from(...).
private static final int MAX_CHANNELS_NUMBER = 3;
private static final int MAX_BORROW_RETRIES = 3;
private static final Duration MAX_BORROW_DELAY = Duration.ofSeconds(5);
// PooledRef of every currently borrowed channel, keyed by AMQP channel number, so the
// close handler can release or invalidate the matching pool slot.
private final ConcurrentHashMap<Integer, PooledRef<? extends Channel>> refs = new ConcurrentHashMap<>();
private final Mono<Connection> connectionMono;
private final Configuration configuration;
private final InstrumentedPool<? extends Channel> newPool;
private final MetricFactory metricFactory;
// Created lazily in start(); null until then.
private Sender sender;
// Wires a reactor-pool of channels on top of the shared connection and registers pool
// gauges. Channels are evicted when closed or idle for more than 30 seconds.
public ReactorRabbitMQChannelPool(Mono<Connection> connectionMono, Configuration configuration, MetricFactory metricFactory,
GaugeRegistry gaugeRegistry) {
this.connectionMono = connectionMono;
this.configuration = configuration;
this.metricFactory = metricFactory;
// Channels are allocated lazily from the connection, between 1 and maxChannel instances.
newPool = PoolBuilder.from(connectionMono
.flatMap(this::openChannel))
.sizeBetween(1, configuration.maxChannel)
.maxPendingAcquireUnbounded()
.evictionPredicate((channel, metadata) -> {
// Discard channels already shut down by the broker or a previous failure...
if (!channel.isOpen()) {
return true;
}
// ...and channels unused for 30s, freeing broker-side resources.
if (metadata.idleTime() > Duration.ofSeconds(30).toMillis()) {
return true;
}
return false;
})
.destroyHandler(channel -> Mono.fromRunnable(Throwing.runnable(() -> {
if (channel.isOpen()) {
try {
channel.close();
} catch (ShutdownSignalException e) {
// silent this error: the channel is being destroyed anyway, nothing to recover
}
}
}))
.then()
// Channel.close() is blocking I/O; keep it off reactor event-loop threads.
.subscribeOn(Schedulers.boundedElastic()))
.buildPool();
gaugeRegistry.register("rabbitmq.channels.acquired.size", () -> newPool.metrics().acquiredSize());
gaugeRegistry.register("rabbitmq.channels.allocated.size", () -> newPool.metrics().allocatedSize());
gaugeRegistry.register("rabbitmq.channels.idle.size", () -> newPool.metrics().idleSize());
// NOTE(review): "aquire" typo is part of the published metric name; renaming it would
// break existing dashboards, so it is kept as-is.
gaugeRegistry.register("rabbitmq.channels.pending.aquire.size", () -> newPool.metrics().pendingAcquireSize());
}
// Opens one channel on the connection, wrapped so confirmSelect() is idempotent.
// connection.openChannel() yields an empty Optional when the connection's channel limit
// is reached; that is surfaced as an error and retried with the configured backoff.
private Mono<? extends Channel> openChannel(Connection connection) {
return Mono.fromCallable(connection::openChannel)
.map(maybeChannel ->
maybeChannel.orElseThrow(() -> new RuntimeException("RabbitMQ reached to maximum opened channels, cannot get more channels")))
.map(SelectOnceChannel::new)
// Retry on boundedElastic so the backoff sleeps do not block the caller's thread.
.retryWhen(configuration.backoffSpec().scheduler(Schedulers.boundedElastic()))
.doOnError(throwable -> LOGGER.error("error when creating new channel", throwable));
}
// Lifecycle hook (see Startable): creates the shared Sender. Must run before getSender().
public void start() {
sender = createSender();
}
// Returns the Sender created by start(); null if start() has not been called yet.
public Sender getSender() {
return sender;
}
// Builds a fresh Receiver bound to the shared connection; the caller owns closing it.
public Receiver createReceiver() {
return RabbitFlux.createReceiver(new ReceiverOptions().connectionMono(connectionMono));
}
// ChannelPool contract: hands out a pooled channel, timing each borrow under the
// "rabbit-acquire" metric.
@Override
public Mono<? extends Channel> getChannelMono() {
return Mono.from(metricFactory.decoratePublisherWithTimerMetric("rabbit-acquire", borrow()));
}
// Acquires a channel from the pool, records its PooledRef (keyed by channel number) so
// getChannelCloseHandler/tryChannel can release or invalidate it later, and maps pool
// acquisition timeouts to the NoSuchElementException callers historically expect.
private Mono<? extends Channel> borrow() {
return newPool.acquire()
.timeout(configuration.maxBorrowDelay)
.doOnNext(ref -> refs.put(ref.poolable().getChannelNumber(), ref))
.map(PooledRef::poolable)
.onErrorMap(TimeoutException.class, e -> new NoSuchElementException("Timeout waiting for idle object"));
}
// ChannelPool contract: invoked by reactor-rabbitmq when a channel user is done. The
// channel goes back to the pool only if it is still open and the pipeline ended without
// error; otherwise its slot is invalidated so a fresh channel replaces it.
// NOTE(review): assumes every channel passed here was borrowed via borrow(); a channel
// unknown to `refs` would make pooledRef null and NPE — confirm against callers.
@Override
public BiConsumer<SignalType, Channel> getChannelCloseHandler() {
return (signalType, channel) -> metricFactory.runPublishingTimerMetric("rabbit-release",
() -> {
PooledRef<? extends Channel> pooledRef = refs.remove(channel.getChannelNumber());
if (!channel.isOpen() || !executeWithoutError(signalType)) {
pooledRef.invalidate()
.subscribe();
return;
}
pooledRef.release()
.subscribe();
});
}
// A channel usage counts as clean when its pipeline completed normally or was cancelled;
// any other terminal signal (an error) leaves the channel state suspect.
private boolean executeWithoutError(SignalType signalType) {
    switch (signalType) {
        case ON_COMPLETE:
        case CANCEL:
            return true;
        default:
            return false;
    }
}
// Builds a Sender that publishes through this pool's channels; resource-management
// operations (declares/bindings) use a dedicated cached channel instead of pooled ones.
private Sender createSender() {
return RabbitFlux.createSender(new SenderOptions()
.connectionMono(connectionMono)
.channelPool(this)
.resourceManagementChannelMono(
connectionMono.map(Throwing.function(Connection::createChannel)).cache()));
}
// Declares a named work queue then binds it. If the queue already exists without the
// dead-letter argument, the declaration mismatch is tolerated: a warning is logged and
// the binding still proceeds.
public Mono<Void> createWorkQueue(QueueSpecification queueSpecification, BindingSpecification bindingSpecification) {
Preconditions.checkArgument(queueSpecification.getName() != null, "WorkQueue pattern do not make sense for unnamed queues");
Preconditions.checkArgument(queueSpecification.getName().equals(bindingSpecification.getQueue()),
"Binding needs to be targetting the created queue %s instead of %s",
queueSpecification.getName(), bindingSpecification.getQueue());
// Declaration uses a throwaway Sender so its management channel is closed afterwards.
return Flux.concat(
Mono.using(this::createSender,
managementSender -> managementSender.declareQueue(queueSpecification),
Sender::close)
.onErrorResume(
e -> e instanceof ShutdownSignalException
&& e.getMessage().contains("reply-code=406, reply-text=PRECONDITION_FAILED - inequivalent arg 'x-dead-letter-exchange' for queue"),
e -> {
LOGGER.warn("{} already exists without dead-letter setup. Dead lettered messages to it will be lost. " +
"To solve this, re-create the queue with the x-dead-letter-exchange argument set up.",
queueSpecification.getName());
return Mono.empty();
}),
sender.bind(bindingSpecification))
.then();
}
// Declares a named work queue (no binding). Mirrors the two-argument overload: a
// pre-existing queue declared without the dead-letter argument is logged and tolerated.
public Mono<Void> createWorkQueue(QueueSpecification queueSpecification) {
Preconditions.checkArgument(queueSpecification.getName() != null, "WorkQueue pattern do not make sense for unnamed queues");
return Mono.using(this::createSender,
managementSender -> managementSender.declareQueue(queueSpecification),
Sender::close)
.onErrorResume(
e -> e instanceof ShutdownSignalException
&& e.getMessage().contains("reply-code=406, reply-text=PRECONDITION_FAILED - inequivalent arg 'x-dead-letter-exchange' for queue"),
e -> {
LOGGER.warn("{} already exists without dead-letter setup. Dead lettered messages to it will be lost. " +
"To solve this, re-create the queue with the x-dead-letter-exchange argument set up.",
queueSpecification.getName());
return Mono.empty();
})
.then();
}
// Shuts the pool down: closes the shared Sender, invalidates every still-borrowed
// channel (closing the underlying AMQP channels), then disposes the pool.
@PreDestroy
@Override
public void close() {
    // start() may never have run (e.g. a failed startup) — guard against NPE so
    // @PreDestroy cleanup always completes.
    if (sender != null) {
        sender.close();
    }
    Flux.fromIterable(refs.values())
        .flatMap(PooledRef::invalidate)
        .blockLast();
    refs.clear();
    newPool.dispose();
}
// Health check: borrows a channel, reports whether it is open, and always hands it back
// to the pool. Any failure (borrow timeout, closed connection, ...) yields false.
public Mono<Boolean> tryChannel() {
return Mono.usingWhen(borrow(),
channel -> Mono.just(channel.isOpen()),
channel -> {
if (channel != null) {
PooledRef<? extends Channel> pooledRef = refs.remove(channel.getChannelNumber());
return pooledRef.release();
}
return Mono.empty();
})
.onErrorResume(any -> Mono.just(false));
}
}
|
googleapis/google-cloud-java | 35,694 | java-datalineage/proto-google-cloud-datalineage-v1/src/main/java/com/google/cloud/datacatalog/lineage/v1/CreateRunRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datacatalog/lineage/v1/lineage.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datacatalog.lineage.v1;
/**
*
*
* <pre>
* Request message for
* [CreateRun][google.cloud.datacatalog.lineage.v1.CreateRun].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.CreateRunRequest}
*/
public final class CreateRunRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datacatalog.lineage.v1.CreateRunRequest)
CreateRunRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateRunRequest.newBuilder() to construct.
// Generated code: constructor used by Builder.build(); superclass keeps unknown fields.
private CreateRunRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Generated code: no-arg constructor for the default instance; string fields default to "".
private CreateRunRequest() {
parent_ = "";
requestId_ = "";
}
// Generated code: reflection hook the protobuf runtime uses to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateRunRequest();
}
// Generated code: message descriptor for reflection-based access to this proto type.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateRunRequest_descriptor;
}
// Generated code: maps this message's fields to their reflective accessors.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateRunRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.CreateRunRequest.class,
com.google.cloud.datacatalog.lineage.v1.CreateRunRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the process that should own the run.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Field still holds the parsed ByteString: decode once, cache the String back.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the process that should own the run.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int RUN_FIELD_NUMBER = 2;
private com.google.cloud.datacatalog.lineage.v1.Run run_;
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the run field is set.
*/
@java.lang.Override
public boolean hasRun() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The run.
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.Run getRun() {
return run_ == null ? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance() : run_;
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.RunOrBuilder getRunOrBuilder() {
return run_ == null ? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance() : run_;
}
public static final int REQUEST_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The requestId.
*/
@java.lang.Override
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
}
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The bytes for requestId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoization cache for isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  /**
   * Reports whether all required fields are set. This proto3 message declares no
   * proto2-style required fields, so the answer is always {@code true}; the result
   * is cached after the first call.
   */
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Nothing to verify; cache the positive result.
    memoizedIsInitialized = 1;
    return true;
  }
  /**
   * Serializes this message to {@code output} in protobuf wire format. Per proto3
   * semantics, fields equal to their defaults (empty strings, unset {@code run})
   * are omitted from the wire.
   */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    // Bit 0x1 of bitField0_ tracks explicit presence of the `run` message field.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getRun());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, requestId_);
    }
    getUnknownFields().writeTo(output);
  }
  /**
   * Computes (and memoizes) the serialized byte size of this message, mirroring the
   * field-skipping logic of {@link #writeTo}. {@code memoizedSize == -1} means the
   * size has not been computed yet.
   */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getRun());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(requestId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, requestId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /**
   * Field-by-field equality: parent, run (presence then value), requestId, and
   * finally the unknown-field set. Non-CreateRunRequest operands fall back to the
   * superclass comparison.
   */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.datacatalog.lineage.v1.CreateRunRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.datacatalog.lineage.v1.CreateRunRequest other =
        (com.google.cloud.datacatalog.lineage.v1.CreateRunRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasRun() != other.hasRun()) return false;
    if (hasRun()) {
      if (!getRun().equals(other.getRun())) return false;
    }
    if (!getRequestId().equals(other.getRequestId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /**
   * Hash over the same fields compared in {@link #equals}, folding in field numbers
   * so different fields with equal values hash differently. Memoized in
   * {@code memoizedHashCode}; 0 is the "not yet computed" sentinel.
   */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    // `run` only contributes when explicitly present, matching equals().
    if (hasRun()) {
      hash = (37 * hash) + RUN_FIELD_NUMBER;
      hash = (53 * hash) + getRun().hashCode();
    }
    hash = (37 * hash) + REQUEST_ID_FIELD_NUMBER;
    hash = (53 * hash) + getRequestId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  // Creates a builder seeded from the default (empty) instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Creates a builder pre-populated with the fields of {@code prototype}.
  public static Builder newBuilder(
      com.google.cloud.datacatalog.lineage.v1.CreateRunRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Skip the pointless mergeFrom(default) round-trip for the default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for
* [CreateRun][google.cloud.datacatalog.lineage.v1.CreateRun].
* </pre>
*
* Protobuf type {@code google.cloud.datacatalog.lineage.v1.CreateRunRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.lineage.v1.CreateRunRequest)
com.google.cloud.datacatalog.lineage.v1.CreateRunRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateRunRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateRunRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.datacatalog.lineage.v1.CreateRunRequest.class,
com.google.cloud.datacatalog.lineage.v1.CreateRunRequest.Builder.class);
}
// Construct using com.google.cloud.datacatalog.lineage.v1.CreateRunRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getRunFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
run_ = null;
if (runBuilder_ != null) {
runBuilder_.dispose();
runBuilder_ = null;
}
requestId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.datacatalog.lineage.v1.LineageProto
.internal_static_google_cloud_datacatalog_lineage_v1_CreateRunRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.CreateRunRequest getDefaultInstanceForType() {
return com.google.cloud.datacatalog.lineage.v1.CreateRunRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datacatalog.lineage.v1.CreateRunRequest build() {
com.google.cloud.datacatalog.lineage.v1.CreateRunRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
    @java.lang.Override
    public com.google.cloud.datacatalog.lineage.v1.CreateRunRequest buildPartial() {
      com.google.cloud.datacatalog.lineage.v1.CreateRunRequest result =
          new com.google.cloud.datacatalog.lineage.v1.CreateRunRequest(this);
      // Copy fields only if at least one was explicitly set on this builder.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies explicitly-set builder fields into `result`, translating builder presence
    // bits (parent=0x1, run=0x2, requestId=0x4) into the message's own bitField0_,
    // which tracks only the `run` message field (as bit 0x1).
    private void buildPartial0(com.google.cloud.datacatalog.lineage.v1.CreateRunRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        // Prefer the nested field builder's built message when one exists.
        result.run_ = runBuilder_ == null ? run_ : runBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.requestId_ = requestId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datacatalog.lineage.v1.CreateRunRequest) {
return mergeFrom((com.google.cloud.datacatalog.lineage.v1.CreateRunRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.datacatalog.lineage.v1.CreateRunRequest other) {
if (other == com.google.cloud.datacatalog.lineage.v1.CreateRunRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasRun()) {
mergeRun(other.getRun());
}
if (!other.getRequestId().isEmpty()) {
requestId_ = other.requestId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    /**
     * Parses fields from {@code input} and merges them into this builder.
     *
     * <p>Wire tags: 10 = field 1 (parent, string), 18 = field 2 (run, message),
     * 26 = field 3 (request_id, string); tag 0 marks end of stream. Unrecognized
     * tags are preserved through {@code parseUnknownField}.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Merge directly into the run field builder so partial data survives.
                input.readMessage(getRunFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                requestId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents even on failure so partially-merged state is observed.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The name of the process that should own the run.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the process that should own the run.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The name of the process that should own the run.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the process that should own the run.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The name of the process that should own the run.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.datacatalog.lineage.v1.Run run_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Run,
com.google.cloud.datacatalog.lineage.v1.Run.Builder,
com.google.cloud.datacatalog.lineage.v1.RunOrBuilder>
runBuilder_;
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the run field is set.
*/
public boolean hasRun() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The run.
*/
public com.google.cloud.datacatalog.lineage.v1.Run getRun() {
if (runBuilder_ == null) {
return run_ == null
? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()
: run_;
} else {
return runBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRun(com.google.cloud.datacatalog.lineage.v1.Run value) {
if (runBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
run_ = value;
} else {
runBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setRun(com.google.cloud.datacatalog.lineage.v1.Run.Builder builderForValue) {
if (runBuilder_ == null) {
run_ = builderForValue.build();
} else {
runBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
    public Builder mergeRun(com.google.cloud.datacatalog.lineage.v1.Run value) {
      if (runBuilder_ == null) {
        // Merge into an existing non-default run; otherwise just adopt `value`.
        if (((bitField0_ & 0x00000002) != 0)
            && run_ != null
            && run_ != com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()) {
          getRunBuilder().mergeFrom(value);
        } else {
          run_ = value;
        }
      } else {
        runBuilder_.mergeFrom(value);
      }
      // run_ may be null here when a field builder now owns the value (the merge
      // branch via getRunBuilder() already set the presence bit and fired onChanged()).
      if (run_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearRun() {
bitField0_ = (bitField0_ & ~0x00000002);
run_ = null;
if (runBuilder_ != null) {
runBuilder_.dispose();
runBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.Run.Builder getRunBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getRunFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.datacatalog.lineage.v1.RunOrBuilder getRunOrBuilder() {
if (runBuilder_ != null) {
return runBuilder_.getMessageOrBuilder();
} else {
return run_ == null
? com.google.cloud.datacatalog.lineage.v1.Run.getDefaultInstance()
: run_;
}
}
/**
*
*
* <pre>
* Required. The run to create.
* </pre>
*
* <code>
* .google.cloud.datacatalog.lineage.v1.Run run = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Run,
com.google.cloud.datacatalog.lineage.v1.Run.Builder,
com.google.cloud.datacatalog.lineage.v1.RunOrBuilder>
getRunFieldBuilder() {
if (runBuilder_ == null) {
runBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.datacatalog.lineage.v1.Run,
com.google.cloud.datacatalog.lineage.v1.Run.Builder,
com.google.cloud.datacatalog.lineage.v1.RunOrBuilder>(
getRun(), getParentForChildren(), isClean());
run_ = null;
}
return runBuilder_;
}
private java.lang.Object requestId_ = "";
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The requestId.
*/
public java.lang.String getRequestId() {
java.lang.Object ref = requestId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
requestId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return The bytes for requestId.
*/
public com.google.protobuf.ByteString getRequestIdBytes() {
java.lang.Object ref = requestId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
requestId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @param value The requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
requestId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearRequestId() {
requestId_ = getDefaultInstance().getRequestId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A unique identifier for this request. Restricted to 36 ASCII characters.
* A random UUID is recommended. This request is idempotent only if a
* `request_id` is provided.
* </pre>
*
* <code>string request_id = 3;</code>
*
* @param value The bytes for requestId to set.
* @return This builder for chaining.
*/
public Builder setRequestIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
requestId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.lineage.v1.CreateRunRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.datacatalog.lineage.v1.CreateRunRequest)
  // Singleton all-default instance shared by the protobuf runtime and callers.
  private static final com.google.cloud.datacatalog.lineage.v1.CreateRunRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datacatalog.lineage.v1.CreateRunRequest();
  }
  public static com.google.cloud.datacatalog.lineage.v1.CreateRunRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire-format parser: delegates to Builder.mergeFrom and, on any failure, attaches
  // the partially-parsed message to the thrown InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<CreateRunRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateRunRequest>() {
        @java.lang.Override
        public CreateRunRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateRunRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateRunRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datacatalog.lineage.v1.CreateRunRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/james-project | 36,226 | server/data/data-jmap/src/test/java/org/apache/james/jmap/api/change/MailboxChangeRepositoryContract.java | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.jmap.api.change;
import static org.apache.james.mailbox.fixture.MailboxFixture.BOB;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.time.ZonedDateTime;
import java.util.Optional;
import org.apache.james.jmap.api.exception.ChangeNotFoundException;
import org.apache.james.jmap.api.model.AccountId;
import org.apache.james.mailbox.model.MailboxId;
import org.assertj.core.api.SoftAssertions;
import org.junit.jupiter.api.Test;
import com.google.common.collect.ImmutableList;
public interface MailboxChangeRepositoryContract {
AccountId ACCOUNT_ID = AccountId.fromUsername(BOB);
ZonedDateTime DATE = ZonedDateTime.now();
Limit DEFAULT_NUMBER_OF_CHANGES = Limit.of(5);
State.Factory stateFactory();
MailboxChangeRepository mailboxChangeRepository();
MailboxId generateNewMailboxId();
    // Persisting a well-formed mailbox change must complete without throwing.
    // NOTE(review): name would read better as saveChangeShouldSucceed, but renaming
    // would affect classes implementing this contract, so it is left as-is.
    @Test
    default void saveChangeShouldSuccess() {
        MailboxChangeRepository repository = mailboxChangeRepository();
        State state = stateFactory().generate();
        MailboxId id1 = generateNewMailboxId();
        MailboxChange change = MailboxChange.builder().accountId(ACCOUNT_ID).state(state).date(DATE).isCountChange(false).created(ImmutableList.of(id1)).build();
        assertThatCode(() -> repository.save(change).block())
            .doesNotThrowAnyException();
    }
@Test
default void getLatestStateShouldReturnInitialWhenEmpty() {
MailboxChangeRepository repository = mailboxChangeRepository();
assertThat(repository.getLatestState(ACCOUNT_ID).block())
.isEqualTo(State.INITIAL);
}
    // The latest state is the state of the most recently dated change for the account.
    @Test
    default void getLatestStateShouldReturnLastPersistedState() {
        MailboxChangeRepository repository = mailboxChangeRepository();
        State.Factory stateFactory = stateFactory();
        MailboxId id1 = generateNewMailboxId();
        MailboxId id2 = generateNewMailboxId();
        MailboxId id3 = generateNewMailboxId();
        // Three changes at DATE-2h, DATE-1h and DATE; the newest one must win.
        MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
        MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2)).build();
        MailboxChange change3 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).created(ImmutableList.of(id3)).build();
        repository.save(change1).block();
        repository.save(change2).block();
        repository.save(change3).block();
        assertThat(repository.getLatestState(ACCOUNT_ID).block())
            .isEqualTo(change3.getState());
    }
    // Changes marked shared() (delegated from another account) must be ignored by
    // getLatestState: the newest *non-shared* change (change2) is the expected answer.
    @Test
    default void getLatestStateShouldNotReturnDelegated() {
        MailboxChangeRepository repository = mailboxChangeRepository();
        State.Factory stateFactory = stateFactory();
        MailboxId id1 = generateNewMailboxId();
        MailboxId id2 = generateNewMailboxId();
        MailboxId id3 = generateNewMailboxId();
        MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
        MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2)).build();
        // Newest change, but delegated — must not be reported.
        MailboxChange change3 = MailboxChange.builder()
            .accountId(ACCOUNT_ID)
            .state(stateFactory.generate())
            .date(DATE)
            .isCountChange(false)
            .created(ImmutableList.of(id3))
            .shared()
            .build();
        repository.save(change1).block();
        repository.save(change2).block();
        repository.save(change3).block();
        assertThat(repository.getLatestState(ACCOUNT_ID).block())
            .isEqualTo(change2.getState());
    }
@Test
default void getLatestStateWithDelegationShouldReturnInitialWhenEmpty() {
MailboxChangeRepository repository = mailboxChangeRepository();
assertThat(repository.getLatestStateWithDelegation(ACCOUNT_ID).block())
.isEqualTo(State.INITIAL);
}
@Test
// Contract: getLatestStateWithDelegation returns the state of the change with
// the most recent date (change3 at DATE), not the order of insertion.
default void getLatestStateWithDelegationShouldReturnLastPersistedState() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2)).build();
MailboxChange change3 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).created(ImmutableList.of(id3)).build();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
assertThat(repository.getLatestStateWithDelegation(ACCOUNT_ID).block())
.isEqualTo(change3.getState());
}
@Test
// Contract: unlike getLatestState, the "WithDelegation" variant must include
// shared (delegated) changes — the shared change3 is the expected latest state.
default void getLatestStateWithDelegationShouldReturnDelegated() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2)).build();
MailboxChange change3 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE)
.isCountChange(false)
.created(ImmutableList.of(id3))
.shared()
.build();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
assertThat(repository.getLatestStateWithDelegation(ACCOUNT_ID).block())
.isEqualTo(change3.getState());
}
@Test
// NOTE(review): name should grammatically be "getChangesShouldSucceed"; kept
// unchanged for compatibility with existing test reports.
// Contract: getSinceState(referenceState) returns only the changes recorded
// after the reference state — here, change's updated ids.
default void getChangesShouldSuccess() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).updated(ImmutableList.of(id2)).build();
repository.save(oldState).block();
repository.save(change).block();
assertThat(repository.getSinceState(ACCOUNT_ID, referenceState, Optional.empty()).block().getAllChanges())
.hasSameElementsAs(change.getUpdated());
}
@Test
// When the reference state is already the most recent persisted state,
// getSinceState reports no changes at all.
default void getChangesShouldReturnEmptyWhenNoNewerState() {
    MailboxChangeRepository testee = mailboxChangeRepository();
    State latestState = stateFactory().generate();

    MailboxChange onlyChange = MailboxChange.builder()
        .accountId(ACCOUNT_ID)
        .state(latestState)
        .date(DATE)
        .isCountChange(false)
        .created(ImmutableList.of(generateNewMailboxId()))
        .build();
    testee.save(onlyChange).block();

    assertThat(testee.getSinceState(ACCOUNT_ID, latestState, Optional.empty()).block()
            .getAllChanges())
        .isEmpty();
}
@Test
// Asking for changes since the latest state still echoes that state back as
// the newState of the (empty) result.
default void getChangesShouldReturnCurrentStateWhenNoNewerState() {
    MailboxChangeRepository testee = mailboxChangeRepository();
    State latestState = stateFactory().generate();

    MailboxChange onlyChange = MailboxChange.builder()
        .accountId(ACCOUNT_ID)
        .state(latestState)
        .date(DATE)
        .isCountChange(false)
        .created(ImmutableList.of(generateNewMailboxId()))
        .build();
    testee.save(onlyChange).block();

    assertThat(testee.getSinceState(ACCOUNT_ID, latestState, Optional.empty()).block()
            .getNewState())
        .isEqualTo(onlyChange.getState());
}
@Test
// Contract: with Limit.of(3), the three single-id changes recorded after the
// reference state all fit and are returned (id2, id3, id4).
default void getChangesShouldLimitChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(3)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id2)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id3)).build();
MailboxChange change3 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).created(ImmutableList.of(id4)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
assertThat(repository.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(3))).block().getCreated())
.containsExactlyInAnyOrder(id2, id3, id4);
}
@Test
// Contract: starting from State.INITIAL with Limit.of(3), the OLDEST three
// changes are returned (id1, id2, id3) — changes are consumed oldest-first.
default void getChangesShouldReturnAllFromInitial() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(3)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id2)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id3)).build();
MailboxChange change3 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).created(ImmutableList.of(id4)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
assertThat(repository.getSinceState(ACCOUNT_ID, State.INITIAL, Optional.of(Limit.of(3))).block().getCreated())
.containsExactlyInAnyOrder(id1, id2, id3);
}
@Test
// Contract: when the limit cuts the stream short, newState is the state of the
// LAST change actually included — state2 (the third of four changes, limit 3).
default void getChangesFromInitialShouldReturnNewState() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(3)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id2)).build();
// state2 is captured separately so the assertion can reference it directly.
State state2 = stateFactory.generate();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(state2).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id3)).build();
MailboxChange change3 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).created(ImmutableList.of(id4)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
assertThat(repository.getSinceState(ACCOUNT_ID, State.INITIAL, Optional.of(Limit.of(3))).block().getNewState())
.isEqualTo(state2);
}
@Test
// NOTE(review): method name has a typo ("Deletegated" -> "Delegated"); kept
// unchanged for compatibility with existing test reports.
// Contract: the non-delegation getSinceState excludes shared changes
// (change3/change4), returning only id1 and id2.
default void getSinceStateFromInitialShouldNotIncludeDeletegatedChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(3)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id2)).build();
MailboxChange change3 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).shared(true).created(ImmutableList.of(id3)).build();
MailboxChange change4 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).shared(true).created(ImmutableList.of(id4)).build();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
repository.save(change4).block();
assertThat(repository.getSinceState(ACCOUNT_ID, State.INITIAL, Optional.empty()).block().getCreated())
.containsExactlyInAnyOrder(id1, id2);
}
@Test
// NOTE(review): method name has a typo ("Deletegated" -> "Delegated"); kept
// unchanged for compatibility.
// Contract: the "WithDelegation" variant includes shared changes as well —
// all four created ids are expected.
default void getSinceStateWithDelegationFromInitialShouldIncludeDeletegatedChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(3)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id2)).build();
MailboxChange change3 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).shared(true).created(ImmutableList.of(id3)).build();
MailboxChange change4 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).shared(true).created(ImmutableList.of(id4)).build();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
repository.save(change4).block();
assertThat(repository.getSinceStateWithDelegation(ACCOUNT_ID, State.INITIAL, Optional.empty()).block().getCreated())
.containsExactlyInAnyOrder(id1, id2, id3, id4);
}
@Test
// Contract: with Optional.empty() a default limit applies. change1's batch of
// five ids is returned and change2 is cut off — presumably the default limit
// is 5; TODO confirm against the implementation's default Limit value.
default void getChangesShouldLimitChangesWhenMaxChangesOmitted() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxId id5 = generateNewMailboxId();
MailboxId id6 = generateNewMailboxId();
MailboxId id7 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2, id3, id4, id5, id6)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).created(ImmutableList.of(id7)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceState(ACCOUNT_ID, referenceState, Optional.empty()).block().getAllChanges())
.hasSameElementsAs(change1.getCreated());
}
@Test
// Contract: changes are taken whole-batch or not at all. With Limit.of(3),
// change1's two ids fit, but adding change2's two would exceed the limit, so
// only change1's created ids are returned.
default void getChangesShouldNotReturnMoreThanMaxChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxId id5 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2, id3)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).created(ImmutableList.of(id4, id5)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(3))).block().getAllChanges())
.hasSameElementsAs(change1.getCreated());
}
@Test
// Contract: a SINGLE change larger than the limit (two ids vs Limit.of(1))
// cannot be split, so the repository must fail with
// CanNotCalculateChangesException carrying this exact message.
default void getChangesShouldThrowWhenNumberOfChangesExceedMaxChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2, id3)).build();
repository.save(oldState).block();
repository.save(change1).block();
assertThatThrownBy(() -> repository.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(1))).block().getAllChanges())
.isInstanceOf(CanNotCalculateChangesException.class)
.hasMessage("Current change collector limit 1 is exceeded by a single change, hence we cannot calculate changes.");
}
@Test
// Contract: when no limit truncates the result, newState is the state of the
// most recent change (change2).
default void getChangesShouldReturnNewState() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2, id3)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).updated(ImmutableList.of(id2, id3)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceState(ACCOUNT_ID, referenceState, Optional.empty()).block().getNewState())
.isEqualTo(change2.getState());
}
@Test
// Contract: Limit.of(2) consumes only change1 (two ids), leaving change2
// pending — hasMoreChanges must report true.
default void hasMoreChangesShouldBeTrueWhenMoreChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2, id3)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).updated(ImmutableList.of(id2, id1)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(2))).block().hasMoreChanges())
.isTrue();
}
@Test
// Contract: Limit.of(4) covers both pending changes completely, so
// hasMoreChanges must report false.
default void hasMoreChangesShouldBeFalseWhenNoMoreChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).created(ImmutableList.of(id2, id3)).build();
MailboxChange change2 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE).isCountChange(false).updated(ImmutableList.of(id2, id3)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(4))).block().hasMoreChanges())
.isFalse();
}
@Test
// Contract: changes are merged to their NET effect across the window:
//  - created then updated (id2, id3, id6, id7)     -> still reported as created
//  - created then destroyed (id4, id5)             -> dropped entirely
//  - updated with no prior creation (id9)          -> reported as updated
//  - destroyed with no prior creation (id10)       -> reported as destroyed
default void changesShouldBeStoredInTheirRespectiveType() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxId id4 = generateNewMailboxId();
MailboxId id5 = generateNewMailboxId();
MailboxId id6 = generateNewMailboxId();
MailboxId id7 = generateNewMailboxId();
MailboxId id8 = generateNewMailboxId();
MailboxId id9 = generateNewMailboxId();
MailboxId id10 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(3)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id2, id3, id4, id5)).build();
MailboxChange change2 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE.minusHours(1))
.isCountChange(false)
.created(ImmutableList.of(id6, id7))
.updated(ImmutableList.of(id2, id3, id9))
.destroyed(ImmutableList.of(id4)).build();
MailboxChange change3 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE)
.isCountChange(false)
.created(ImmutableList.of(id8))
.updated(ImmutableList.of(id6, id7))
.destroyed(ImmutableList.of(id5, id10)).build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
repository.save(change3).block();
MailboxChanges mailboxChanges = repository.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(20))).block();
SoftAssertions.assertSoftly(softly -> {
softly.assertThat(mailboxChanges.getCreated()).containsExactlyInAnyOrder(id2, id3, id6, id7, id8);
softly.assertThat(mailboxChanges.getUpdated()).containsExactlyInAnyOrder(id9);
softly.assertThat(mailboxChanges.getDestroyed()).containsExactlyInAnyOrder(id10);
});
}
@Test
// Contract: an id updated in several successive changes (id1, id2) appears
// only once in the aggregated updated set — duplicates are collapsed.
default void getChangesShouldIgnoreDuplicatedValues() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxId id3 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder().accountId(ACCOUNT_ID).state(stateFactory.generate()).date(DATE.minusHours(1)).isCountChange(false).updated(ImmutableList.of(id1, id2)).build();
MailboxChange change2 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE)
.isCountChange(false)
.created(ImmutableList.of(id3))
.updated(ImmutableList.of(id1, id2))
.build();
repository.save(oldState).block();
repository.save(change1).block();
repository.save(change2).block();
MailboxChanges mailboxChanges = repository.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(3))).block();
SoftAssertions.assertSoftly(softly -> {
softly.assertThat(mailboxChanges.getUpdated()).containsExactlyInAnyOrder(id1, id2);
softly.assertThat(mailboxChanges.getCreated()).containsExactly(id3);
});
}
@Test
// Contract: a shared (delegated) change is visible through
// getSinceStateWithDelegation — change1's updated id1 must be returned.
default void getChangesShouldReturnDelegatedChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
State referenceState = stateFactory.generate();
MailboxId id1 = generateNewMailboxId();
MailboxChange oldState = MailboxChange.builder().accountId(ACCOUNT_ID).state(referenceState).date(DATE.minusHours(2)).isCountChange(false).created(ImmutableList.of(id1)).build();
MailboxChange change1 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE.minusHours(1))
.isCountChange(false)
.updated(ImmutableList.of(id1))
.shared()
.build();
repository.save(oldState).block();
repository.save(change1).block();
assertThat(repository.getSinceStateWithDelegation(ACCOUNT_ID, referenceState, Optional.empty()).block().getUpdated())
.containsExactly(id1);
}
@Test
// With no stored changes at all, the result must NOT be classified as
// count-changes-only.
default void isCountChangeOnlyShouldBeFalseWhenNoChanges() {
    MailboxChangeRepository testee = mailboxChangeRepository();

    MailboxChanges changes = testee.getSinceState(ACCOUNT_ID, State.INITIAL, Optional.empty()).block();

    assertThat(changes.isCountChangesOnly()).isFalse();
}
@Test
// Contract: when every change has isCountChange(false), the aggregate must not
// be classified as count-changes-only.
default void isCountChangeOnlyShouldBeFalseWhenAllNonCountChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxChange change1 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE.minusHours(1))
.isCountChange(false)
.created(ImmutableList.of(id1))
.build();
MailboxChange change2 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE)
.isCountChange(false)
.created(ImmutableList.of(id2))
.build();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceState(ACCOUNT_ID, State.INITIAL, Optional.empty()).block().isCountChangesOnly())
.isFalse();
}
@Test
// Contract: a mix of non-count changes (here both are isCountChange(false),
// one created and one updated) must not be classified as count-changes-only.
default void isCountChangeOnlyShouldBeFalseWhenMixedChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxChange change1 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE.minusHours(1))
.isCountChange(false)
.created(ImmutableList.of(id1))
.build();
MailboxChange change2 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE)
.isCountChange(false)
.updated(ImmutableList.of(id2))
.build();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceState(ACCOUNT_ID, State.INITIAL, Optional.empty()).block().isCountChangesOnly())
.isFalse();
}
@Test
// Contract: when EVERY returned change has isCountChange(true), the aggregate
// is classified as count-changes-only. Note this test reads through
// getSinceStateWithDelegation, unlike its isCountChange(false) counterparts.
default void isCountChangeOnlyShouldBeTrueWhenAllCountChanges() {
MailboxChangeRepository repository = mailboxChangeRepository();
State.Factory stateFactory = stateFactory();
MailboxId id1 = generateNewMailboxId();
MailboxId id2 = generateNewMailboxId();
MailboxChange change1 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE.minusHours(1))
.isCountChange(true)
.updated(ImmutableList.of(id1))
.build();
MailboxChange change2 = MailboxChange.builder()
.accountId(ACCOUNT_ID)
.state(stateFactory.generate())
.date(DATE)
.isCountChange(true)
.updated(ImmutableList.of(id2))
.build();
repository.save(change1).block();
repository.save(change2).block();
assertThat(repository.getSinceStateWithDelegation(ACCOUNT_ID, State.INITIAL, Optional.empty()).block().isCountChangesOnly())
.isTrue();
}
@Test
// A negative maxChanges limit is rejected eagerly (before subscription) with
// an IllegalArgumentException.
default void getChangesShouldFailWhenInvalidMaxChanges() {
    MailboxChangeRepository testee = mailboxChangeRepository();
    State.Factory stateFactory = stateFactory();
    State referenceState = stateFactory.generate();

    MailboxChange currentState = MailboxChange.builder()
        .accountId(ACCOUNT_ID)
        .state(referenceState)
        .date(DATE)
        .isCountChange(false)
        .created(ImmutableList.of(generateNewMailboxId()))
        .build();
    MailboxChange newerChange = MailboxChange.builder()
        .accountId(ACCOUNT_ID)
        .state(stateFactory.generate())
        .date(DATE)
        .isCountChange(false)
        .created(ImmutableList.of(generateNewMailboxId()))
        .build();
    testee.save(currentState).block();
    testee.save(newerChange).block();

    assertThatThrownBy(() -> testee.getSinceState(ACCOUNT_ID, referenceState, Optional.of(Limit.of(-1))))
        .isInstanceOf(IllegalArgumentException.class);
}
@Test
// Requesting changes since a state that was never persisted fails with
// ChangeNotFoundException when the Mono is resolved.
default void getChangesShouldFailWhenSinceStateNotFound() {
    MailboxChangeRepository testee = mailboxChangeRepository();
    State unknownState = stateFactory().generate();

    assertThatThrownBy(() -> testee.getSinceState(ACCOUNT_ID, unknownState, Optional.empty()).block())
        .isInstanceOf(ChangeNotFoundException.class);
}
}
|
googleapis/google-cloud-java | 35,787 | java-video-stitcher/proto-google-cloud-video-stitcher-v1/src/main/java/com/google/cloud/video/stitcher/v1/UpdateLiveConfigRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/video/stitcher/v1/video_stitcher_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.video.stitcher.v1;
/**
*
*
* <pre>
* Request message for VideoStitcherService.updateLiveConfig.
* </pre>
*
* Protobuf type {@code google.cloud.video.stitcher.v1.UpdateLiveConfigRequest}
*/
public final class UpdateLiveConfigRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.video.stitcher.v1.UpdateLiveConfigRequest)
UpdateLiveConfigRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateLiveConfigRequest.newBuilder() to construct.
// Generated code (DO NOT EDIT): construction is delegated to the builder;
// GeneratedMessageV3 copies the builder's unknown fields into this message.
private UpdateLiveConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateLiveConfigRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
// Generated hook used by the protobuf runtime to create fresh instances
// reflectively; the UnusedPrivateParameter keeps this overload private to it.
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateLiveConfigRequest();
}
// Static accessor for this message type's descriptor, resolved from the
// generated file-level proto (VideoStitcherServiceProto).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_UpdateLiveConfigRequest_descriptor;
}
@java.lang.Override
// Wires reflective field access (used by toString, JSON printing, dynamic
// messages) to this class and its Builder.
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_UpdateLiveConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest.class,
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest.Builder.class);
}
private int bitField0_;
public static final int LIVE_CONFIG_FIELD_NUMBER = 1;
private com.google.cloud.video.stitcher.v1.LiveConfig liveConfig_;
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the liveConfig field is set.
*/
@java.lang.Override
public boolean hasLiveConfig() {
// Presence of live_config is tracked by bit 0x1 of bitField0_.
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The liveConfig.
*/
@java.lang.Override
public com.google.cloud.video.stitcher.v1.LiveConfig getLiveConfig() {
// Never returns null: falls back to the default instance when unset.
return liveConfig_ == null
? com.google.cloud.video.stitcher.v1.LiveConfig.getDefaultInstance()
: liveConfig_;
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.video.stitcher.v1.LiveConfigOrBuilder getLiveConfigOrBuilder() {
// On an immutable message this is identical to getLiveConfig(); never null.
return liveConfig_ == null
? com.google.cloud.video.stitcher.v1.LiveConfig.getDefaultInstance()
: liveConfig_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
// Presence of update_mask is tracked by bit 0x2 of bitField0_.
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
// Never returns null: falls back to the default FieldMask when unset.
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
// On an immutable message this is identical to getUpdateMask(); never null.
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
// Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// This proto3 message has no required (proto2-style) fields, so it is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Serialize only fields whose presence bits are set, in field-number order,
// then append any unknown fields preserved from parsing.
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getLiveConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize caches the computed size; -1 means "not yet computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getLiveConfig());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest)) {
return super.equals(obj);
}
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest other =
(com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest) obj;
// Field-wise equality: presence must match first, then values; unknown fields
// are compared too so round-tripped messages stay equal.
if (hasLiveConfig() != other.hasLiveConfig()) return false;
if (hasLiveConfig()) {
if (!getLiveConfig().equals(other.getLiveConfig())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
// Cached after first computation; 0 means "not yet computed".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Only set fields contribute, mirroring equals() so the contract holds.
if (hasLiveConfig()) {
hash = (37 * hash) + LIVE_CONFIG_FIELD_NUMBER;
hash = (53 * hash) + getLiveConfig().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The byte-based overloads delegate to
// PARSER directly; the stream-based overloads go through GeneratedMessageV3
// helpers that translate parse failures into IOExceptions for the caller.
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder with all fields cleared (obtained from the default instance).
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated from an existing message.
public static Builder newBuilder(
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder without a merge pass.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for VideoStitcherService.updateLiveConfig.
* </pre>
*
* Protobuf type {@code google.cloud.video.stitcher.v1.UpdateLiveConfigRequest}
*/
// Mutable companion of UpdateLiveConfigRequest. Field presence is tracked in
// bitField0_ (bit 0x00000001 = live_config, bit 0x00000002 = update_mask), and
// each message-typed field may be backed either by the plain field or by a
// lazily created SingleFieldBuilderV3 (nested-builder support).
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.video.stitcher.v1.UpdateLiveConfigRequest)
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_UpdateLiveConfigRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_UpdateLiveConfigRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest.class,
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest.Builder.class);
}
// Construct using com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested field builders only when the runtime requires it.
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getLiveConfigFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
// Reset all presence bits and drop both the plain fields and any nested builders.
super.clear();
bitField0_ = 0;
liveConfig_ = null;
if (liveConfigBuilder_ != null) {
liveConfigBuilder_.dispose();
liveConfigBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.video.stitcher.v1.VideoStitcherServiceProto
.internal_static_google_cloud_video_stitcher_v1_UpdateLiveConfigRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest getDefaultInstanceForType() {
return com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest build() {
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest buildPartial() {
com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest result =
new com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields from the builder into the freshly constructed message,
// reading from the nested builder when one exists, and transfers the
// corresponding presence bits.
private void buildPartial0(com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.liveConfig_ = liveConfigBuilder_ == null ? liveConfig_ : liveConfigBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest) {
return mergeFrom((com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest other) {
// Merging the default instance is a no-op; otherwise only set fields are merged.
if (other == com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest.getDefaultInstance())
return this;
if (other.hasLiveConfig()) {
mergeLiveConfig(other.getLiveConfig());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
// Wire-format parse loop: dispatch on the tag (field number << 3 | wire type);
// tag 10 = live_config (field 1, length-delimited), tag 18 = update_mask (field 2).
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getLiveConfigFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.video.stitcher.v1.LiveConfig liveConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.video.stitcher.v1.LiveConfig,
com.google.cloud.video.stitcher.v1.LiveConfig.Builder,
com.google.cloud.video.stitcher.v1.LiveConfigOrBuilder>
liveConfigBuilder_;
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the liveConfig field is set.
*/
public boolean hasLiveConfig() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The liveConfig.
*/
public com.google.cloud.video.stitcher.v1.LiveConfig getLiveConfig() {
if (liveConfigBuilder_ == null) {
return liveConfig_ == null
? com.google.cloud.video.stitcher.v1.LiveConfig.getDefaultInstance()
: liveConfig_;
} else {
return liveConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setLiveConfig(com.google.cloud.video.stitcher.v1.LiveConfig value) {
if (liveConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
liveConfig_ = value;
} else {
liveConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setLiveConfig(
com.google.cloud.video.stitcher.v1.LiveConfig.Builder builderForValue) {
if (liveConfigBuilder_ == null) {
liveConfig_ = builderForValue.build();
} else {
liveConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeLiveConfig(com.google.cloud.video.stitcher.v1.LiveConfig value) {
// If a value is already present (and is not the shared default instance,
// compared by reference), merge into it; otherwise just adopt the new value.
if (liveConfigBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& liveConfig_ != null
&& liveConfig_ != com.google.cloud.video.stitcher.v1.LiveConfig.getDefaultInstance()) {
getLiveConfigBuilder().mergeFrom(value);
} else {
liveConfig_ = value;
}
} else {
liveConfigBuilder_.mergeFrom(value);
}
if (liveConfig_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearLiveConfig() {
bitField0_ = (bitField0_ & ~0x00000001);
liveConfig_ = null;
if (liveConfigBuilder_ != null) {
liveConfigBuilder_.dispose();
liveConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.video.stitcher.v1.LiveConfig.Builder getLiveConfigBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getLiveConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.video.stitcher.v1.LiveConfigOrBuilder getLiveConfigOrBuilder() {
if (liveConfigBuilder_ != null) {
return liveConfigBuilder_.getMessageOrBuilder();
} else {
return liveConfig_ == null
? com.google.cloud.video.stitcher.v1.LiveConfig.getDefaultInstance()
: liveConfig_;
}
}
/**
*
*
* <pre>
* Required. The LiveConfig resource which replaces the resource on the
* server.
* </pre>
*
* <code>
* .google.cloud.video.stitcher.v1.LiveConfig live_config = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.video.stitcher.v1.LiveConfig,
com.google.cloud.video.stitcher.v1.LiveConfig.Builder,
com.google.cloud.video.stitcher.v1.LiveConfigOrBuilder>
getLiveConfigFieldBuilder() {
// Lazily swap from plain-field storage to nested-builder storage.
if (liveConfigBuilder_ == null) {
liveConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.video.stitcher.v1.LiveConfig,
com.google.cloud.video.stitcher.v1.LiveConfig.Builder,
com.google.cloud.video.stitcher.v1.LiveConfigOrBuilder>(
getLiveConfig(), getParentForChildren(), isClean());
liveConfig_ = null;
}
return liveConfigBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
// Same merge-or-adopt logic as mergeLiveConfig, for the update_mask field.
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. The update mask applies to the resource.
* For the `FieldMask` definition, see
* https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.video.stitcher.v1.UpdateLiveConfigRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.video.stitcher.v1.UpdateLiveConfigRequest)
// Singleton default instance shared by all unset message-typed fields.
private static final com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest();
}
public static com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser used by all parseFrom overloads; parses via a builder and attaches
// the partially built message to any parse exception it rethrows.
private static final com.google.protobuf.Parser<UpdateLiveConfigRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateLiveConfigRequest>() {
@java.lang.Override
public UpdateLiveConfigRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateLiveConfigRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateLiveConfigRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.video.stitcher.v1.UpdateLiveConfigRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/persistent_resource.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Configuration for the runtime on a PersistentResource instance, including
* but not limited to:
*
* * Service accounts used to run the workloads.
* * Whether to make it a dedicated Ray Cluster.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.ResourceRuntimeSpec}
*/
public final class ResourceRuntimeSpec extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ResourceRuntimeSpec)
ResourceRuntimeSpecOrBuilder {
private static final long serialVersionUID = 0L;
// Use ResourceRuntimeSpec.newBuilder() to construct.
private ResourceRuntimeSpec(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// No-arg constructor used only for the default instance and reflective creation.
private ResourceRuntimeSpec() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
// Runtime hook for creating fresh instances without invoking a public constructor.
return new ResourceRuntimeSpec();
}
// Descriptor and accessor table are defined in the generated proto outer class.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.PersistentResourceProto
.internal_static_google_cloud_aiplatform_v1_ResourceRuntimeSpec_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.PersistentResourceProto
.internal_static_google_cloud_aiplatform_v1_ResourceRuntimeSpec_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.ResourceRuntimeSpec.class,
com.google.cloud.aiplatform.v1.ResourceRuntimeSpec.Builder.class);
}
// Presence bits: 0x00000001 = service_account_spec, 0x00000002 = ray_spec
// (note the bit order follows declaration order here, not field-number order).
private int bitField0_;
public static final int SERVICE_ACCOUNT_SPEC_FIELD_NUMBER = 2;
private com.google.cloud.aiplatform.v1.ServiceAccountSpec serviceAccountSpec_;
/**
*
*
* <pre>
* Optional. Configure the use of workload identity on the PersistentResource
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the serviceAccountSpec field is set.
*/
@java.lang.Override
public boolean hasServiceAccountSpec() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. Configure the use of workload identity on the PersistentResource
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The serviceAccountSpec.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.ServiceAccountSpec getServiceAccountSpec() {
// Null-safe: returns the default instance when unset.
return serviceAccountSpec_ == null
? com.google.cloud.aiplatform.v1.ServiceAccountSpec.getDefaultInstance()
: serviceAccountSpec_;
}
/**
*
*
* <pre>
* Optional. Configure the use of workload identity on the PersistentResource
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.ServiceAccountSpecOrBuilder
getServiceAccountSpecOrBuilder() {
return serviceAccountSpec_ == null
? com.google.cloud.aiplatform.v1.ServiceAccountSpec.getDefaultInstance()
: serviceAccountSpec_;
}
public static final int RAY_SPEC_FIELD_NUMBER = 1;
// Backing field for ray_spec; presence tracked by bit 0x00000002 of bitField0_.
private com.google.cloud.aiplatform.v1.RaySpec raySpec_;
/**
*
*
* <pre>
* Optional. Ray cluster configuration.
* Required when creating a dedicated RayCluster on the PersistentResource.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the raySpec field is set.
*/
@java.lang.Override
public boolean hasRaySpec() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. Ray cluster configuration.
* Required when creating a dedicated RayCluster on the PersistentResource.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The raySpec.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.RaySpec getRaySpec() {
// Null-safe: returns the default instance when unset.
return raySpec_ == null
? com.google.cloud.aiplatform.v1.RaySpec.getDefaultInstance()
: raySpec_;
}
/**
*
*
* <pre>
* Optional. Ray cluster configuration.
* Required when creating a dedicated RayCluster on the PersistentResource.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.RaySpecOrBuilder getRaySpecOrBuilder() {
return raySpec_ == null
? com.google.cloud.aiplatform.v1.RaySpec.getDefaultInstance()
: raySpec_;
}
// Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No proto2-style required fields, so this message is always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Fields are emitted in field-number order (ray_spec = 1, service_account_spec = 2),
// which is why bit 0x00000002 is checked before bit 0x00000001 here.
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(1, getRaySpec());
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getServiceAccountSpec());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
// memoizedSize caches the computed size; -1 means "not yet computed".
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getRaySpec());
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getServiceAccountSpec());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.aiplatform.v1.ResourceRuntimeSpec)) {
return super.equals(obj);
}
com.google.cloud.aiplatform.v1.ResourceRuntimeSpec other =
(com.google.cloud.aiplatform.v1.ResourceRuntimeSpec) obj;
// Field-wise equality: presence must match first, then values, then unknown fields.
if (hasServiceAccountSpec() != other.hasServiceAccountSpec()) return false;
if (hasServiceAccountSpec()) {
if (!getServiceAccountSpec().equals(other.getServiceAccountSpec())) return false;
}
if (hasRaySpec() != other.hasRaySpec()) return false;
if (hasRaySpec()) {
if (!getRaySpec().equals(other.getRaySpec())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasServiceAccountSpec()) {
hash = (37 * hash) + SERVICE_ACCOUNT_SPEC_FIELD_NUMBER;
hash = (53 * hash) + getServiceAccountSpec().hashCode();
}
if (hasRaySpec()) {
hash = (37 * hash) + RAY_SPEC_FIELD_NUMBER;
hash = (53 * hash) + getRaySpec().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. All overloads delegate to the shared
// PARSER (or to GeneratedMessageV3's IOException-translating helpers for
// stream inputs); the ExtensionRegistryLite variants resolve extensions.
// ---------------------------------------------------------------------------
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix first, for streams holding
// multiple concatenated messages.
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.aiplatform.v1.ResourceRuntimeSpec prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Skip the mergeFrom() pass when converting the shared default instance:
  // there is nothing to copy.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Configuration for the runtime on a PersistentResource instance, including
 * but not limited to:
 *
 * * Service accounts used to run the workloads.
 * * Whether to make it a dedicated Ray Cluster.
 * </pre>
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.ResourceRuntimeSpec}
 */
// NOTE: protoc-generated builder; field-presence is tracked in bitField0_
// (0x1 = service_account_spec, 0x2 = ray_spec). Do not hand-edit the logic.
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ResourceRuntimeSpec)
    com.google.cloud.aiplatform.v1.ResourceRuntimeSpecOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.PersistentResourceProto
        .internal_static_google_cloud_aiplatform_v1_ResourceRuntimeSpec_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.PersistentResourceProto
        .internal_static_google_cloud_aiplatform_v1_ResourceRuntimeSpec_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.ResourceRuntimeSpec.class,
            com.google.cloud.aiplatform.v1.ResourceRuntimeSpec.Builder.class);
  }
  // Construct using com.google.cloud.aiplatform.v1.ResourceRuntimeSpec.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // alwaysUseFieldBuilders is a protobuf runtime/test hook; when set, nested
    // field builders are created eagerly rather than lazily on first access.
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getServiceAccountSpecFieldBuilder();
      getRaySpecFieldBuilder();
    }
  }
  @java.lang.Override
  public Builder clear() {
    super.clear();
    // Reset all presence bits and drop both message fields and their builders.
    bitField0_ = 0;
    serviceAccountSpec_ = null;
    if (serviceAccountSpecBuilder_ != null) {
      serviceAccountSpecBuilder_.dispose();
      serviceAccountSpecBuilder_ = null;
    }
    raySpec_ = null;
    if (raySpecBuilder_ != null) {
      raySpecBuilder_.dispose();
      raySpecBuilder_ = null;
    }
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.aiplatform.v1.PersistentResourceProto
        .internal_static_google_cloud_aiplatform_v1_ResourceRuntimeSpec_descriptor;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ResourceRuntimeSpec getDefaultInstanceForType() {
    return com.google.cloud.aiplatform.v1.ResourceRuntimeSpec.getDefaultInstance();
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ResourceRuntimeSpec build() {
    com.google.cloud.aiplatform.v1.ResourceRuntimeSpec result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.ResourceRuntimeSpec buildPartial() {
    com.google.cloud.aiplatform.v1.ResourceRuntimeSpec result =
        new com.google.cloud.aiplatform.v1.ResourceRuntimeSpec(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }
  private void buildPartial0(com.google.cloud.aiplatform.v1.ResourceRuntimeSpec result) {
    // Copy each set field into the message, preferring a live sub-builder's
    // built value over the cached message, and transfer the presence bits.
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.serviceAccountSpec_ =
          serviceAccountSpecBuilder_ == null
              ? serviceAccountSpec_
              : serviceAccountSpecBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.raySpec_ = raySpecBuilder_ == null ? raySpec_ : raySpecBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.aiplatform.v1.ResourceRuntimeSpec) {
      return mergeFrom((com.google.cloud.aiplatform.v1.ResourceRuntimeSpec) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  public Builder mergeFrom(com.google.cloud.aiplatform.v1.ResourceRuntimeSpec other) {
    if (other == com.google.cloud.aiplatform.v1.ResourceRuntimeSpec.getDefaultInstance())
      return this;
    if (other.hasServiceAccountSpec()) {
      mergeServiceAccountSpec(other.getServiceAccountSpec());
    }
    if (other.hasRaySpec()) {
      mergeRaySpec(other.getRaySpec());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // Wire tags: 10 = field 1 (ray_spec, length-delimited),
        //            18 = field 2 (service_account_spec, length-delimited).
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getRaySpecFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(
                  getServiceAccountSpecFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }
  private int bitField0_;
  private com.google.cloud.aiplatform.v1.ServiceAccountSpec serviceAccountSpec_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1.ServiceAccountSpec,
          com.google.cloud.aiplatform.v1.ServiceAccountSpec.Builder,
          com.google.cloud.aiplatform.v1.ServiceAccountSpecOrBuilder>
      serviceAccountSpecBuilder_;
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the serviceAccountSpec field is set.
   */
  public boolean hasServiceAccountSpec() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The serviceAccountSpec.
   */
  public com.google.cloud.aiplatform.v1.ServiceAccountSpec getServiceAccountSpec() {
    if (serviceAccountSpecBuilder_ == null) {
      return serviceAccountSpec_ == null
          ? com.google.cloud.aiplatform.v1.ServiceAccountSpec.getDefaultInstance()
          : serviceAccountSpec_;
    } else {
      return serviceAccountSpecBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setServiceAccountSpec(com.google.cloud.aiplatform.v1.ServiceAccountSpec value) {
    if (serviceAccountSpecBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      serviceAccountSpec_ = value;
    } else {
      serviceAccountSpecBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setServiceAccountSpec(
      com.google.cloud.aiplatform.v1.ServiceAccountSpec.Builder builderForValue) {
    if (serviceAccountSpecBuilder_ == null) {
      serviceAccountSpec_ = builderForValue.build();
    } else {
      serviceAccountSpecBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder mergeServiceAccountSpec(
      com.google.cloud.aiplatform.v1.ServiceAccountSpec value) {
    if (serviceAccountSpecBuilder_ == null) {
      // Merge field-by-field only when a non-default value is already present;
      // otherwise simply adopt the incoming message.
      if (((bitField0_ & 0x00000001) != 0)
          && serviceAccountSpec_ != null
          && serviceAccountSpec_
              != com.google.cloud.aiplatform.v1.ServiceAccountSpec.getDefaultInstance()) {
        getServiceAccountSpecBuilder().mergeFrom(value);
      } else {
        serviceAccountSpec_ = value;
      }
    } else {
      serviceAccountSpecBuilder_.mergeFrom(value);
    }
    if (serviceAccountSpec_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder clearServiceAccountSpec() {
    bitField0_ = (bitField0_ & ~0x00000001);
    serviceAccountSpec_ = null;
    if (serviceAccountSpecBuilder_ != null) {
      serviceAccountSpecBuilder_.dispose();
      serviceAccountSpecBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.cloud.aiplatform.v1.ServiceAccountSpec.Builder
      getServiceAccountSpecBuilder() {
    // Handing out the sub-builder implies the field will be set/modified.
    bitField0_ |= 0x00000001;
    onChanged();
    return getServiceAccountSpecFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.cloud.aiplatform.v1.ServiceAccountSpecOrBuilder
      getServiceAccountSpecOrBuilder() {
    if (serviceAccountSpecBuilder_ != null) {
      return serviceAccountSpecBuilder_.getMessageOrBuilder();
    } else {
      return serviceAccountSpec_ == null
          ? com.google.cloud.aiplatform.v1.ServiceAccountSpec.getDefaultInstance()
          : serviceAccountSpec_;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Configure the use of workload identity on the PersistentResource
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.ServiceAccountSpec service_account_spec = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1.ServiceAccountSpec,
          com.google.cloud.aiplatform.v1.ServiceAccountSpec.Builder,
          com.google.cloud.aiplatform.v1.ServiceAccountSpecOrBuilder>
      getServiceAccountSpecFieldBuilder() {
    // Lazily create the field builder; from then on the builder owns the
    // field's state, so the cached message reference is nulled out.
    if (serviceAccountSpecBuilder_ == null) {
      serviceAccountSpecBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.ServiceAccountSpec,
              com.google.cloud.aiplatform.v1.ServiceAccountSpec.Builder,
              com.google.cloud.aiplatform.v1.ServiceAccountSpecOrBuilder>(
              getServiceAccountSpec(), getParentForChildren(), isClean());
      serviceAccountSpec_ = null;
    }
    return serviceAccountSpecBuilder_;
  }
  private com.google.cloud.aiplatform.v1.RaySpec raySpec_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1.RaySpec,
          com.google.cloud.aiplatform.v1.RaySpec.Builder,
          com.google.cloud.aiplatform.v1.RaySpecOrBuilder>
      raySpecBuilder_;
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the raySpec field is set.
   */
  public boolean hasRaySpec() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The raySpec.
   */
  public com.google.cloud.aiplatform.v1.RaySpec getRaySpec() {
    if (raySpecBuilder_ == null) {
      return raySpec_ == null
          ? com.google.cloud.aiplatform.v1.RaySpec.getDefaultInstance()
          : raySpec_;
    } else {
      return raySpecBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setRaySpec(com.google.cloud.aiplatform.v1.RaySpec value) {
    if (raySpecBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      raySpec_ = value;
    } else {
      raySpecBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setRaySpec(com.google.cloud.aiplatform.v1.RaySpec.Builder builderForValue) {
    if (raySpecBuilder_ == null) {
      raySpec_ = builderForValue.build();
    } else {
      raySpecBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder mergeRaySpec(com.google.cloud.aiplatform.v1.RaySpec value) {
    if (raySpecBuilder_ == null) {
      // Merge field-by-field only when a non-default value is already present;
      // otherwise simply adopt the incoming message.
      if (((bitField0_ & 0x00000002) != 0)
          && raySpec_ != null
          && raySpec_ != com.google.cloud.aiplatform.v1.RaySpec.getDefaultInstance()) {
        getRaySpecBuilder().mergeFrom(value);
      } else {
        raySpec_ = value;
      }
    } else {
      raySpecBuilder_.mergeFrom(value);
    }
    if (raySpec_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder clearRaySpec() {
    bitField0_ = (bitField0_ & ~0x00000002);
    raySpec_ = null;
    if (raySpecBuilder_ != null) {
      raySpecBuilder_.dispose();
      raySpecBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.cloud.aiplatform.v1.RaySpec.Builder getRaySpecBuilder() {
    // Handing out the sub-builder implies the field will be set/modified.
    bitField0_ |= 0x00000002;
    onChanged();
    return getRaySpecFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.cloud.aiplatform.v1.RaySpecOrBuilder getRaySpecOrBuilder() {
    if (raySpecBuilder_ != null) {
      return raySpecBuilder_.getMessageOrBuilder();
    } else {
      return raySpec_ == null
          ? com.google.cloud.aiplatform.v1.RaySpec.getDefaultInstance()
          : raySpec_;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Ray cluster configuration.
   * Required when creating a dedicated RayCluster on the PersistentResource.
   * </pre>
   *
   * <code>
   * .google.cloud.aiplatform.v1.RaySpec ray_spec = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.aiplatform.v1.RaySpec,
          com.google.cloud.aiplatform.v1.RaySpec.Builder,
          com.google.cloud.aiplatform.v1.RaySpecOrBuilder>
      getRaySpecFieldBuilder() {
    // Lazily create the field builder; from then on the builder owns the
    // field's state, so the cached message reference is nulled out.
    if (raySpecBuilder_ == null) {
      raySpecBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.aiplatform.v1.RaySpec,
              com.google.cloud.aiplatform.v1.RaySpec.Builder,
              com.google.cloud.aiplatform.v1.RaySpecOrBuilder>(
              getRaySpec(), getParentForChildren(), isClean());
      raySpec_ = null;
    }
    return raySpecBuilder_;
  }
  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ResourceRuntimeSpec)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ResourceRuntimeSpec)
// Singleton default instance shared by getDefaultInstance()/toBuilder().
private static final com.google.cloud.aiplatform.v1.ResourceRuntimeSpec DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ResourceRuntimeSpec();
}
public static com.google.cloud.aiplatform.v1.ResourceRuntimeSpec getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser shared by every parseFrom() entry point above.
private static final com.google.protobuf.Parser<ResourceRuntimeSpec> PARSER =
    new com.google.protobuf.AbstractParser<ResourceRuntimeSpec>() {
      @java.lang.Override
      public ResourceRuntimeSpec parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect the
          // partial message from the exception.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<ResourceRuntimeSpec> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ResourceRuntimeSpec> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.aiplatform.v1.ResourceRuntimeSpec getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/hadoop-common | 35,739 | hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalJobRunner.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
import org.apache.hadoop.mapreduce.ClusterMetrics;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.QueueInfo;
import org.apache.hadoop.mapreduce.TaskCompletionEvent;
import org.apache.hadoop.mapreduce.TaskTrackerInfo;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.v2.LogParams;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.ReflectionUtils;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/** Implements MapReduce locally, in-process, for debugging. */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class LocalJobRunner implements ClientProtocol {
public static final Log LOG =
    LogFactory.getLog(LocalJobRunner.class);

/** The maximum number of map tasks to run in parallel in LocalJobRunner */
public static final String LOCAL_MAX_MAPS =
    "mapreduce.local.map.tasks.maximum";

/** The maximum number of reduce tasks to run in parallel in LocalJobRunner */
public static final String LOCAL_MAX_REDUCES =
    "mapreduce.local.reduce.tasks.maximum";

private FileSystem fs;
// All jobs submitted to this runner, keyed by job id.
private HashMap<JobID, Job> jobs = new HashMap<JobID, Job>();
private JobConf conf;
// Counts of currently running map/reduce tasks; atomic because tasks run on
// executor threads while the job thread reads them for progress reporting.
private AtomicInteger map_tasks = new AtomicInteger(0);
private AtomicInteger reduce_tasks = new AtomicInteger(0);
final Random rand = new Random();

private LocalJobRunnerMetrics myMetrics = null;

// Subdirectory (under the configured local dirs) where job files are localized.
private static final String jobDir = "localRunner/";
/** Always reports the ClientProtocol version; the local runner is in-process. */
public long getProtocolVersion(String protocol, long clientVersion) {
  return ClientProtocol.versionID;
}

@Override
public ProtocolSignature getProtocolSignature(String protocol,
    long clientVersion, int clientMethodsHash) throws IOException {
  // Delegate to the standard helper, which reflects over this class's methods.
  return ProtocolSignature.getProtocolSignature(
      this, protocol, clientVersion, clientMethodsHash);
}
private class Job extends Thread implements TaskUmbilicalProtocol {
// The job directory on the system: JobClient places job configurations here.
// This is analogous to JobTracker's system directory.
private Path systemJobDir;
private Path systemJobFile;

// The job directory for the task.  Analagous to a task's job directory.
private Path localJobDir;
private Path localJobFile;

private JobID id;
private JobConf job;

private int numMapTasks;
private int numReduceTasks;
// Per-task progress/counters, indexed by task id; sized once task counts are known.
private float [] partialMapProgress;
private float [] partialReduceProgress;
private Counters [] mapCounters;
private Counters [] reduceCounters;

private JobStatus status;
// Attempt ids of launched maps; synchronized because map runnables add to it
// from executor threads while the job thread iterates it.
private List<TaskAttemptID> mapIds = Collections.synchronizedList(
    new ArrayList<TaskAttemptID>());

private JobProfile profile;
private FileSystem localFs;
boolean killed = false;

private LocalDistributedCacheManager localDistributedCacheManager;
/** Always reports the TaskUmbilicalProtocol version; tasks run in-process. */
public long getProtocolVersion(String protocol, long clientVersion) {
  return TaskUmbilicalProtocol.versionID;
}

@Override
public ProtocolSignature getProtocolSignature(String protocol,
    long clientVersion, int clientMethodsHash) throws IOException {
  // Delegate to the standard helper, which reflects over this class's methods.
  return ProtocolSignature.getProtocolSignature(
      this, protocol, clientVersion, clientMethodsHash);
}
/**
 * Localizes a submitted job and immediately starts running it: reads the
 * submitted job.xml, sets up the distributed cache, re-writes the (possibly
 * updated) configuration into the local job directory, and starts this
 * thread (Job extends Thread).
 *
 * @param jobid the id assigned to this job
 * @param jobSubmitDir the directory where JobClient placed job.xml etc.
 * @throws IOException if the job configuration cannot be read or localized
 */
public Job(JobID jobid, String jobSubmitDir) throws IOException {
  this.systemJobDir = new Path(jobSubmitDir);
  this.systemJobFile = new Path(systemJobDir, "job.xml");
  this.id = jobid;
  JobConf conf = new JobConf(systemJobFile);
  this.localFs = FileSystem.getLocal(conf);
  String user = UserGroupInformation.getCurrentUser().getShortUserName();
  this.localJobDir = localFs.makeQualified(new Path(
      new Path(conf.getLocalPath(jobDir), user), jobid.toString()));
  this.localJobFile = new Path(this.localJobDir, id + ".xml");

  // Manage the distributed cache. If there are files to be copied,
  // this will trigger localFile to be re-written again.
  localDistributedCacheManager = new LocalDistributedCacheManager();
  localDistributedCacheManager.setup(conf);

  // Write out configuration file. Instead of copying it from
  // systemJobFile, we re-write it, since setup(), above, may have
  // updated it. try-with-resources guarantees the stream is closed
  // even if writeXml() throws.
  try (OutputStream out = localFs.create(localJobFile)) {
    conf.writeXml(out);
  }
  this.job = new JobConf(localJobFile);

  // Job (the current object) is a Thread, so we wrap its class loader.
  if (localDistributedCacheManager.hasLocalClasspaths()) {
    setContextClassLoader(localDistributedCacheManager.makeClassLoader(
        getContextClassLoader()));
  }

  profile = new JobProfile(job.getUser(), id, systemJobFile.toString(),
      "http://localhost:8080/", job.getJobName());
  status = new JobStatus(id, 0.0f, 0.0f, JobStatus.RUNNING,
      profile.getUser(), profile.getJobName(), profile.getJobFile(),
      profile.getURL().toString());

  // Register before starting so status queries can find the job immediately.
  jobs.put(id, this);

  this.start();
}
/**
 * A Runnable that records any Throwable raised by run() so the submitting
 * (job) thread can detect and report task failures after the executor
 * finishes, instead of losing them on the worker thread.
 */
protected abstract class RunnableWithThrowable implements Runnable {
  // Written by the worker thread, read by the job thread; volatile for visibility.
  public volatile Throwable storedException;
}
/**
 * A Runnable instance that handles a map task to be run by an executor.
 */
protected class MapTaskRunnable extends RunnableWithThrowable {
  private final int taskId;
  private final TaskSplitMetaInfo info;
  private final JobID jobId;
  private final JobConf localConf;

  // This is a reference to a shared object passed in by the
  // external context; this delivers state to the reducers regarding
  // where to fetch mapper outputs.
  private final Map<TaskAttemptID, MapOutputFile> mapOutputFiles;

  public MapTaskRunnable(TaskSplitMetaInfo info, int taskId, JobID jobId,
      Map<TaskAttemptID, MapOutputFile> mapOutputFiles) {
    this.info = info;
    this.taskId = taskId;
    this.mapOutputFiles = mapOutputFiles;
    this.jobId = jobId;
    // Per-task copy of the job conf so task-level localization below does not
    // leak into other tasks sharing the same Job.
    this.localConf = new JobConf(job);
  }

  public void run() {
    try {
      TaskAttemptID mapId = new TaskAttemptID(new TaskID(
          jobId, TaskType.MAP, taskId), 0);
      LOG.info("Starting task: " + mapId);
      mapIds.add(mapId);
      MapTask map = new MapTask(systemJobFile.toString(), mapId, taskId,
          info.getSplitIndex(), 1);
      map.setUser(UserGroupInformation.getCurrentUser().
          getShortUserName());
      setupChildMapredLocalDirs(map, localConf);

      // Register this attempt's output file so reducers can locate it later.
      MapOutputFile mapOutput = new MROutputFiles();
      mapOutput.setConf(localConf);
      mapOutputFiles.put(mapId, mapOutput);

      map.setJobFile(localJobFile.toString());
      localConf.setUser(map.getUser());
      map.localizeConfiguration(localConf);
      map.setConf(localConf);
      try {
        map_tasks.getAndIncrement();
        myMetrics.launchMap(mapId);
        map.run(localConf, Job.this);
        myMetrics.completeMap(mapId);
      } finally {
        // Always decrement the running-map counter, even if the task failed.
        map_tasks.getAndDecrement();
      }

      LOG.info("Finishing task: " + mapId);
    } catch (Throwable e) {
      // Surface the failure to the job thread via RunnableWithThrowable.
      this.storedException = e;
    }
  }
}
/**
 * Create Runnables to encapsulate map tasks for use by the executor
 * service.
 *
 * @param taskInfo Info about the map task splits
 * @param jobId the job id
 * @param mapOutputFiles a mapping from task attempts to output files
 * @return a List of Runnables, one per map task.
 */
protected List<RunnableWithThrowable> getMapTaskRunnables(
    TaskSplitMetaInfo [] taskInfo, JobID jobId,
    Map<TaskAttemptID, MapOutputFile> mapOutputFiles) {
  // Task ids are assigned by position within the split metadata array.
  ArrayList<RunnableWithThrowable> runnables =
      new ArrayList<RunnableWithThrowable>(taskInfo.length);
  for (int taskId = 0; taskId < taskInfo.length; taskId++) {
    runnables.add(new MapTaskRunnable(taskInfo[taskId], taskId, jobId,
        mapOutputFiles));
  }
  return runnables;
}
/**
 * A Runnable instance that handles a reduce task to be run by an executor.
 *
 * Builds a ReduceTask that consumes the in-memory map-output registry,
 * localizes its configuration, and runs it in-process. Failures are
 * captured in {@code storedException} rather than thrown.
 */
protected class ReduceTaskRunnable extends RunnableWithThrowable {
  private final int taskId;
  private final JobID jobId;
  // Per-task copy of the job conf; mutated during localization below.
  private final JobConf localConf;

  // This is a reference to a shared object passed in by the
  // external context; this delivers state to the reducers regarding
  // where to fetch mapper outputs.
  private final Map<TaskAttemptID, MapOutputFile> mapOutputFiles;

  public ReduceTaskRunnable(int taskId, JobID jobId,
      Map<TaskAttemptID, MapOutputFile> mapOutputFiles) {
    this.taskId = taskId;
    this.jobId = jobId;
    this.mapOutputFiles = mapOutputFiles;
    this.localConf = new JobConf(job);
    this.localConf.set("mapreduce.jobtracker.address", "local");
  }

  public void run() {
    try {
      // Attempt number is always 0: the local runner never retries tasks.
      TaskAttemptID reduceId = new TaskAttemptID(new TaskID(
          jobId, TaskType.REDUCE, taskId), 0);
      LOG.info("Starting task: " + reduceId);

      ReduceTask reduce = new ReduceTask(systemJobFile.toString(),
          reduceId, taskId, mapIds.size(), 1);
      reduce.setUser(UserGroupInformation.getCurrentUser().
          getShortUserName());
      setupChildMapredLocalDirs(reduce, localConf);
      // Map outputs are fetched directly from this shared registry
      // instead of over HTTP shuffle.
      reduce.setLocalMapFiles(mapOutputFiles);

      // killJob() interrupts the Job thread; honor it before starting.
      if (!Job.this.isInterrupted()) {
        reduce.setJobFile(localJobFile.toString());
        localConf.setUser(reduce.getUser());
        reduce.localizeConfiguration(localConf);
        reduce.setConf(localConf);
        try {
          // Track the number of in-flight reduce tasks for metrics.
          reduce_tasks.getAndIncrement();
          myMetrics.launchReduce(reduce.getTaskID());
          reduce.run(localConf, Job.this);
          myMetrics.completeReduce(reduce.getTaskID());
        } finally {
          reduce_tasks.getAndDecrement();
        }

        LOG.info("Finishing task: " + reduceId);
      } else {
        throw new InterruptedException();
      }
    } catch (Throwable t) {
      // store this to be rethrown in the initial thread context.
      this.storedException = t;
    }
  }
}
/**
 * Create Runnables to encapsulate reduce tasks for use by the executor
 * service.
 *
 * @param jobId the job id
 * @param mapOutputFiles a mapping from task attempts to output files
 * @return a List of Runnables, one per reduce task.
 */
protected List<RunnableWithThrowable> getReduceTaskRunnables(
    JobID jobId, Map<TaskAttemptID, MapOutputFile> mapOutputFiles) {
  // One runnable per reduce partition; ids run 0..numReduceTasks-1.
  ArrayList<RunnableWithThrowable> runnables =
      new ArrayList<RunnableWithThrowable>(this.numReduceTasks);
  for (int reduceId = 0; reduceId < this.numReduceTasks; reduceId++) {
    runnables.add(new ReduceTaskRunnable(reduceId, jobId, mapOutputFiles));
  }
  return runnables;
}
/**
 * Allocates the per-task progress and counter slots that hold partial
 * results from the individual task attempts of this job.
 *
 * @param numMaps the number of map tasks in this job.
 * @param numReduces the number of reduce tasks in this job.
 */
private synchronized void initCounters(int numMaps, int numReduces) {
  this.numMapTasks = numMaps;
  this.numReduceTasks = numReduces;

  // One progress slot and one Counters instance per map attempt.
  this.partialMapProgress = new float[numMaps];
  this.mapCounters = new Counters[numMaps];
  for (int map = 0; map < numMaps; map++) {
    mapCounters[map] = new Counters();
  }

  // Likewise for reduce attempts.
  this.partialReduceProgress = new float[numReduces];
  this.reduceCounters = new Counters[numReduces];
  for (int reduce = 0; reduce < numReduces; reduce++) {
    reduceCounters[reduce] = new Counters();
  }
}
/**
 * Creates the executor service used to run map tasks.
 * Pool size is the configured LOCAL_MAX_MAPS clamped to
 * [1, numMapTasks]; worker threads carry a descriptive name.
 *
 * @return an ExecutorService instance that handles map tasks
 */
protected synchronized ExecutorService createMapExecutor() {
  // Resolve and validate the configured parallelism.
  int poolSize = job.getInt(LOCAL_MAX_MAPS, 1);
  if (poolSize < 1) {
    throw new IllegalArgumentException(
        "Configured " + LOCAL_MAX_MAPS + " must be >= 1");
  }
  // Never allocate more threads than tasks, but keep at least one
  // thread even when the job has no map tasks at all.
  poolSize = Math.max(Math.min(poolSize, this.numMapTasks), 1);
  LOG.debug("Starting mapper thread pool executor.");
  LOG.debug("Max local threads: " + poolSize);
  LOG.debug("Map tasks to process: " + this.numMapTasks);

  // Name the worker threads so stack traces are attributable.
  ThreadFactory workerFactory = new ThreadFactoryBuilder()
      .setNameFormat("LocalJobRunner Map Task Executor #%d")
      .build();
  return Executors.newFixedThreadPool(poolSize, workerFactory);
}
/**
 * Creates the executor service used to run reduce tasks.
 * Pool size is the configured LOCAL_MAX_REDUCES clamped to
 * [1, numReduceTasks].
 *
 * Fix: reduce worker threads are now given descriptive names, consistent
 * with createMapExecutor(); previously they ran with anonymous
 * pool-N-thread-M names, making thread dumps hard to read.
 *
 * @return an ExecutorService instance that handles reduce tasks
 */
protected synchronized ExecutorService createReduceExecutor() {
  // Determine the size of the thread pool to use
  int maxReduceThreads = job.getInt(LOCAL_MAX_REDUCES, 1);
  if (maxReduceThreads < 1) {
    throw new IllegalArgumentException(
        "Configured " + LOCAL_MAX_REDUCES + " must be >= 1");
  }
  maxReduceThreads = Math.min(maxReduceThreads, this.numReduceTasks);
  maxReduceThreads = Math.max(maxReduceThreads, 1); // In case of no tasks.

  LOG.debug("Starting reduce thread pool executor.");
  LOG.debug("Max local threads: " + maxReduceThreads);
  LOG.debug("Reduce tasks to process: " + this.numReduceTasks);

  // Name worker threads for debuggability, mirroring the map pool.
  ThreadFactory tf = new ThreadFactory() {
    private final AtomicInteger serial = new AtomicInteger(0);
    @Override
    public Thread newThread(Runnable r) {
      return new Thread(r,
          "LocalJobRunner Reduce Task Executor #" + serial.getAndIncrement());
    }
  };
  return Executors.newFixedThreadPool(maxReduceThreads, tf);
}
/**
 * Run a set of tasks and waits for them to complete.
 *
 * Submits every runnable to the given executor, shuts the executor down,
 * and blocks (effectively forever) until all tasks finish. Afterwards,
 * the first stored exception from any task is rethrown in this thread.
 *
 * @param runnables the work units to execute
 * @param service executor that runs them; shut down by this method
 * @param taskType label ("map"/"reduce") used only for log messages
 * @throws Exception wrapping the first task failure, if any
 */
private void runTasks(List<RunnableWithThrowable> runnables,
    ExecutorService service, String taskType) throws Exception {
  // Start populating the executor with work units.
  // They may begin running immediately (in other threads).
  for (Runnable r : runnables) {
    service.submit(r);
  }

  try {
    service.shutdown(); // Instructs queue to drain.

    // Wait for tasks to finish; do not use a time-based timeout.
    // (See http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6179024)
    LOG.info("Waiting for " + taskType + " tasks");
    service.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
  } catch (InterruptedException ie) {
    // Cancel all threads.
    service.shutdownNow();
    throw ie;
  }

  LOG.info(taskType + " task executor complete.");

  // After waiting for the tasks to complete, if any of these
  // have thrown an exception, rethrow it now in the main thread context.
  for (RunnableWithThrowable r : runnables) {
    if (r.storedException != null) {
      // Wrapped so a Throwable can propagate through a throws-Exception
      // signature; the original is preserved as the cause.
      throw new Exception(r.storedException);
    }
  }
}
/**
 * Instantiates the job's OutputCommitter.
 *
 * With the new API, the committer comes from the configured OutputFormat
 * (asked via a synthetic map task-attempt context); with the old API it
 * is read from "mapred.output.committer.class", defaulting to
 * FileOutputCommitter.
 *
 * @param newApiCommitter true to resolve via the new (mapreduce) API
 * @param jobId id used to build the synthetic task-attempt context
 * @param conf job configuration
 * @return the committer instance
 * @throws Exception if the OutputFormat or committer cannot be created
 */
private org.apache.hadoop.mapreduce.OutputCommitter
createOutputCommitter(boolean newApiCommitter, JobID jobId, Configuration conf) throws Exception {
  org.apache.hadoop.mapreduce.OutputCommitter committer = null;

  LOG.info("OutputCommitter set in config "
      + conf.get("mapred.output.committer.class"));

  if (newApiCommitter) {
    // Fabricate attempt 0 of map task 0 just to obtain a context
    // acceptable to OutputFormat.getOutputCommitter().
    org.apache.hadoop.mapreduce.TaskID taskId =
        new org.apache.hadoop.mapreduce.TaskID(jobId, TaskType.MAP, 0);
    org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID =
        new org.apache.hadoop.mapreduce.TaskAttemptID(taskId, 0);
    org.apache.hadoop.mapreduce.TaskAttemptContext taskContext =
        new TaskAttemptContextImpl(conf, taskAttemptID);
    OutputFormat outputFormat =
        ReflectionUtils.newInstance(taskContext.getOutputFormatClass(), conf);
    committer = outputFormat.getOutputCommitter(taskContext);
  } else {
    committer = ReflectionUtils.newInstance(conf.getClass(
        "mapred.output.committer.class", FileOutputCommitter.class,
        org.apache.hadoop.mapred.OutputCommitter.class), conf);
  }
  LOG.info("OutputCommitter is " + committer.getClass().getName());
  return committer;
}
/**
 * Drives the whole job: reads split metadata, runs job setup, executes
 * all map tasks (then reduces, if any) on executor pools, commits or
 * aborts the output, and finally deletes staging and local files.
 * Runs on the Job thread started from the constructor.
 */
@Override
public void run() {
  JobID jobId = profile.getJobID();
  JobContext jContext = new JobContextImpl(job, jobId);

  org.apache.hadoop.mapreduce.OutputCommitter outputCommitter = null;
  try {
    outputCommitter = createOutputCommitter(conf.getUseNewMapper(), jobId, conf);
  } catch (Exception e) {
    // Without a committer the job cannot proceed; note the job status
    // is left unchanged here (no FAILED transition, no notification).
    LOG.info("Failed to createOutputCommitter", e);
    return;
  }

  try {
    TaskSplitMetaInfo[] taskSplitMetaInfos =
        SplitMetaInfoReader.readSplitMetaInfo(jobId, localFs, conf, systemJobDir);
    int numReduceTasks = job.getNumReduceTasks();
    outputCommitter.setupJob(jContext);
    status.setSetupProgress(1.0f);

    // Shared, synchronized registry: map tasks record their outputs
    // here and reduce tasks fetch them back locally.
    Map<TaskAttemptID, MapOutputFile> mapOutputFiles =
        Collections.synchronizedMap(new HashMap<TaskAttemptID, MapOutputFile>());

    List<RunnableWithThrowable> mapRunnables = getMapTaskRunnables(
        taskSplitMetaInfos, jobId, mapOutputFiles);

    initCounters(mapRunnables.size(), numReduceTasks);
    ExecutorService mapService = createMapExecutor();
    runTasks(mapRunnables, mapService, "map");

    try {
      if (numReduceTasks > 0) {
        List<RunnableWithThrowable> reduceRunnables = getReduceTaskRunnables(
            jobId, mapOutputFiles);
        ExecutorService reduceService = createReduceExecutor();
        runTasks(reduceRunnables, reduceService, "reduce");
      }
    } finally {
      // Intermediate map outputs are always removed, pass or fail.
      for (MapOutputFile output : mapOutputFiles.values()) {
        output.removeAll();
      }
    }
    // delete the temporary directory in output directory
    outputCommitter.commitJob(jContext);
    status.setCleanupProgress(1.0f);

    // A kill that arrived during execution wins over SUCCEEDED.
    if (killed) {
      this.status.setRunState(JobStatus.KILLED);
    } else {
      this.status.setRunState(JobStatus.SUCCEEDED);
    }

    JobEndNotifier.localRunnerNotification(job, status);
  } catch (Throwable t) {
    try {
      outputCommitter.abortJob(jContext,
          org.apache.hadoop.mapreduce.JobStatus.State.FAILED);
    } catch (IOException ioe) {
      LOG.info("Error cleaning up job:" + id);
    }
    status.setCleanupProgress(1.0f);
    if (killed) {
      this.status.setRunState(JobStatus.KILLED);
    } else {
      this.status.setRunState(JobStatus.FAILED);
    }
    LOG.warn(id, t);

    JobEndNotifier.localRunnerNotification(job, status);
  } finally {
    try {
      fs.delete(systemJobFile.getParent(), true); // delete submit dir
      localFs.delete(localJobFile, true);         // delete local copy
      // Cleanup distributed cache
      localDistributedCacheManager.close();
    } catch (IOException e) {
      LOG.warn("Error cleaning up "+id+": "+e);
    }
  }
}
// TaskUmbilicalProtocol methods

/** Unused locally: tasks run in-process, so no JVM ever asks for work. */
@Override
public JvmTask getTask(JvmContext context) { return null; }

/**
 * Receives a progress/counter update from an in-process task and folds
 * it into the aggregate job status.
 *
 * @param taskId the reporting attempt
 * @param taskStatus its current status; null updates are acknowledged
 *        without changing state
 * @return feedback with taskFound=true (the local runner never loses tasks)
 */
@Override
public synchronized AMFeedback statusUpdate(TaskAttemptID taskId,
    TaskStatus taskStatus) throws IOException, InterruptedException {
  AMFeedback feedback = new AMFeedback();
  feedback.setTaskFound(true);
  if (null == taskStatus) {
    return feedback;
  }
  // Serialize as we would if distributed in order to make deep copy
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  taskStatus.write(dos);
  dos.close();
  taskStatus = TaskStatus.createTaskStatus(taskStatus.getIsMap());
  taskStatus.readFields(new DataInputStream(
      new ByteArrayInputStream(baos.toByteArray())));

  LOG.info(taskStatus.getStateString());
  // Membership in mapIds distinguishes map attempts from reduce attempts.
  int mapTaskIndex = mapIds.indexOf(taskId);
  if (mapTaskIndex >= 0) {
    // mapping
    float numTasks = (float) this.numMapTasks;

    partialMapProgress[mapTaskIndex] = taskStatus.getProgress();
    mapCounters[mapTaskIndex] = taskStatus.getCounters();

    // Job-level map progress is the mean of per-attempt progress.
    float partialProgress = 0.0f;
    for (float f : partialMapProgress) {
      partialProgress += f;
    }
    status.setMapProgress(partialProgress / numTasks);
  } else {
    // reducing
    int reduceTaskIndex = taskId.getTaskID().getId();
    float numTasks = (float) this.numReduceTasks;

    partialReduceProgress[reduceTaskIndex] = taskStatus.getProgress();
    reduceCounters[reduceTaskIndex] = taskStatus.getCounters();

    float partialProgress = 0.0f;
    for (float f : partialReduceProgress) {
      partialProgress += f;
    }
    status.setReduceProgress(partialProgress / numTasks);
  }

  // ignore phase
  return feedback;
}
/**
 * Return the current values of the counters for this job,
 * including tasks that are in progress.
 * Returns an empty Counters object if the job has not yet been
 * initialized via initCounters().
 */
public synchronized Counters getCurrentCounters() {
  if (mapCounters == null) {
    // Counters not yet initialized for job.
    return new Counters();
  }

  // Aggregate every map attempt's counters...
  Counters total = new Counters();
  for (Counters mapCounter : mapCounters) {
    total = Counters.sum(total, mapCounter);
  }

  // ...then fold in the reduce attempts', when present.
  if (reduceCounters != null && reduceCounters.length > 0) {
    for (Counters reduceCounter : reduceCounters) {
      total = Counters.sum(total, reduceCounter);
    }
  }

  return total;
}
/**
 * Task is reporting that it is in commit_pending
 * and it is waiting for the commit Response
 * (treated as a plain status update; canCommit() below always says yes).
 */
public void commitPending(TaskAttemptID taskid,
                          TaskStatus taskStatus)
                          throws IOException, InterruptedException {
  statusUpdate(taskid, taskStatus);
}

/** Diagnostics are not collected by the local runner. */
@Override
public void reportDiagnosticInfo(TaskAttemptID taskid, String trace) {
  // Ignore for now
}

/** Record ranges are only logged, not tracked. */
@Override
public void reportNextRecordRange(TaskAttemptID taskid,
    SortedRanges.Range range) throws IOException {
  LOG.info("Task " + taskid + " reportedNextRecordRange " + range);
}

/** Local tasks may always commit immediately; no coordination needed. */
@Override
public boolean canCommit(TaskAttemptID taskid)
throws IOException {
  return true;
}

/** Marks the attempt's phase (map or reduce) as fully complete. */
@Override
public void done(TaskAttemptID taskId) throws IOException {
  // Membership in mapIds distinguishes map from reduce attempts.
  int taskIndex = mapIds.indexOf(taskId);
  if (taskIndex >= 0) {                       // mapping
    status.setMapProgress(1.0f);
  } else {
    status.setReduceProgress(1.0f);
  }
}

/** Filesystem errors are only logged; the task itself surfaces failure. */
@Override
public synchronized void fsError(TaskAttemptID taskId, String message)
throws IOException {
  LOG.fatal("FSError: "+ message + "from task: " + taskId);
}

/** Shuffle errors are only logged (no HTTP shuffle runs locally). */
@Override
public void shuffleError(TaskAttemptID taskId, String message) throws IOException {
  LOG.fatal("shuffleError: "+ message + "from task: " + taskId);
}

/** Fatal task errors are only logged; the task itself surfaces failure. */
public synchronized void fatalError(TaskAttemptID taskId, String msg)
throws IOException {
  LOG.fatal("Fatal: "+ msg + "from task: " + taskId);
}

/** No completion events are produced: reducers read map output directly. */
@Override
public MapTaskCompletionEventsUpdate getMapCompletionEvents(JobID jobId,
    int fromEventId, int maxLocs, TaskAttemptID id) throws IOException {
  return new MapTaskCompletionEventsUpdate(
      org.apache.hadoop.mapred.TaskCompletionEvent.EMPTY_ARRAY, false);
}

/** Preemption does not apply to the local runner. */
@Override
public void preempted(TaskAttemptID taskId, TaskStatus taskStatus)
    throws IOException, InterruptedException {
  // ignore
}

/** Checkpointing is not supported locally. */
@Override
public TaskCheckpointID getCheckpointID(TaskID taskId) {
  // ignore
  return null;
}

/** Checkpointing is not supported locally. */
@Override
public void setCheckpointID(TaskID downgrade, TaskCheckpointID cid) {
  // ignore
}
}
/** Convenience constructor: wraps a plain Configuration in a JobConf. */
public LocalJobRunner(Configuration conf) throws IOException {
  this(new JobConf(conf));
}

/**
 * @deprecated use the {@link Configuration}-based constructor instead.
 */
@Deprecated
public LocalJobRunner(JobConf conf) throws IOException {
  this.fs = FileSystem.getLocal(conf);
  this.conf = conf;
  myMetrics = new LocalJobRunnerMetrics(new JobConf(conf));
}
// JobSubmissionProtocol methods

// Monotonically increasing sequence for locally submitted jobs.
// NOTE(review): the field is static but getNewJobID() synchronizes on the
// runner instance, so increments from distinct LocalJobRunner instances in
// one JVM are not mutually excluded — confirm single-runner usage.
private static int jobid = 0;

// used for making sure that local jobs run in different jvms don't
// collide on staging or job directories
private int randid;

/** Mints the next job id, scoped by this runner's random "local" prefix. */
public synchronized org.apache.hadoop.mapreduce.JobID getNewJobID() {
  return new org.apache.hadoop.mapreduce.JobID("local" + randid, ++jobid);
}

/**
 * Creates and starts an in-process Job for the given submit directory
 * (the Job constructor registers itself in {@code jobs} and starts its
 * thread) and returns the new job's status.
 */
public org.apache.hadoop.mapreduce.JobStatus submitJob(
    org.apache.hadoop.mapreduce.JobID jobid, String jobSubmitDir,
    Credentials credentials) throws IOException {
  Job job = new Job(JobID.downgrade(jobid), jobSubmitDir);
  job.job.setCredentials(credentials);
  return job.status;
}

// Marks the job killed and interrupts its thread.
// NOTE(review): throws NullPointerException for an unknown job id (no null
// check on the map lookup) — confirm callers only pass known ids.
public void killJob(org.apache.hadoop.mapreduce.JobID id) {
  jobs.get(JobID.downgrade(id)).killed = true;
  jobs.get(JobID.downgrade(id)).interrupt();
}
/** Job priorities are meaningless with a single local job. */
public void setJobPriority(org.apache.hadoop.mapreduce.JobID id,
    String jp) throws IOException {
  throw new UnsupportedOperationException("Changing job priority " +
      "in LocalJobRunner is not supported.");
}

/** Throws {@link UnsupportedOperationException} */
public boolean killTask(org.apache.hadoop.mapreduce.TaskAttemptID taskId,
    boolean shouldFail) throws IOException {
  throw new UnsupportedOperationException("Killing tasks in " +
      "LocalJobRunner is not supported");
}

/** Task-level reports are not tracked locally; always empty. */
public org.apache.hadoop.mapreduce.TaskReport[] getTaskReports(
    org.apache.hadoop.mapreduce.JobID id, TaskType type) {
  return new org.apache.hadoop.mapreduce.TaskReport[0];
}

/** @return the job's status, or null when the id is unknown. */
public org.apache.hadoop.mapreduce.JobStatus getJobStatus(
    org.apache.hadoop.mapreduce.JobID id) {
  Job job = jobs.get(JobID.downgrade(id));
  if(job != null)
    return job.status;
  else
    return null;
}

// NOTE(review): throws NullPointerException when the job id is unknown
// (unlike getJobStatus above, no null check) — confirm callers.
public org.apache.hadoop.mapreduce.Counters getJobCounters(
    org.apache.hadoop.mapreduce.JobID id) {
  Job job = jobs.get(JobID.downgrade(id));
  return new org.apache.hadoop.mapreduce.Counters(job.getCurrentCounters());
}

/** @return the local filesystem URI. */
public String getFilesystemName() throws IOException {
  return fs.getUri().toString();
}

/** Synthesizes metrics from the in-flight task counters; one "tracker". */
public ClusterMetrics getClusterMetrics() {
  int numMapTasks = map_tasks.get();
  int numReduceTasks = reduce_tasks.get();
  return new ClusterMetrics(numMapTasks, numReduceTasks, numMapTasks,
      numReduceTasks, 0, 0, 1, 1, jobs.size(), 1, 0, 0);
}

/** The local "job tracker" is always running while this object exists. */
public JobTrackerStatus getJobTrackerStatus() {
  return JobTrackerStatus.RUNNING;
}

/** No trackers to expire locally. */
public long getTaskTrackerExpiryInterval() throws IOException, InterruptedException {
  return 0;
}

/**
 * Get all active trackers in cluster.
 * @return array of TaskTrackerInfo (always empty: no trackers locally)
 */
public TaskTrackerInfo[] getActiveTrackers()
    throws IOException, InterruptedException {
  return new TaskTrackerInfo[0];
}

/**
 * Get all blacklisted trackers in cluster.
 * @return array of TaskTrackerInfo (always empty: no trackers locally)
 */
public TaskTrackerInfo[] getBlacklistedTrackers()
    throws IOException, InterruptedException {
  return new TaskTrackerInfo[0];
}

/** Completion events are not generated by the local runner. */
public TaskCompletionEvent[] getTaskCompletionEvents(
    org.apache.hadoop.mapreduce.JobID jobid
    , int fromEventId, int maxEvents) throws IOException {
  return TaskCompletionEvent.EMPTY_ARRAY;
}

/** Job listing is not supported; callers must handle the null. */
public org.apache.hadoop.mapreduce.JobStatus[] getAllJobs() {return null;}


/**
 * Returns the diagnostic information for a particular task in the given job.
 * To be implemented
 */
public String[] getTaskDiagnostics(
    org.apache.hadoop.mapreduce.TaskAttemptID taskid) throws IOException{
  return new String [0];
}
/**
 * @see org.apache.hadoop.mapreduce.protocol.ClientProtocol#getSystemDir()
 */
public String getSystemDir() {
  Path sysDir = new Path(
      conf.get(JTConfig.JT_SYSTEM_DIR, "/tmp/hadoop/mapred/system"));
  return fs.makeQualified(sysDir).toString();
}

/**
 * @see org.apache.hadoop.mapreduce.protocol.ClientProtocol#getQueueAdmins(String)
 */
public AccessControlList getQueueAdmins(String queueName) throws IOException {
  return new AccessControlList(" ");// no queue admins for local job runner
}

/**
 * @see org.apache.hadoop.mapreduce.protocol.ClientProtocol#getStagingAreaDir()
 *
 * Also (re)generates {@code randid}, the per-runner random suffix used to
 * keep staging dirs of concurrent local JVMs from colliding.
 */
public String getStagingAreaDir() throws IOException {
  Path stagingRootDir = new Path(conf.get(JTConfig.JT_STAGING_AREA_ROOT,
      "/tmp/hadoop/mapred/staging"));
  UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
  String user;
  randid = rand.nextInt(Integer.MAX_VALUE);
  if (ugi != null) {
    user = ugi.getShortUserName() + randid;
  } else {
    user = "dummy" + randid;
  }
  return fs.makeQualified(new Path(stagingRootDir, user+"/.staging")).toString();
}
/** Job history is not recorded by the local runner. */
public String getJobHistoryDir() {
  return null;
}

/** Queues are not modeled locally; callers must handle the null. */
@Override
public QueueInfo[] getChildQueues(String queueName) throws IOException {
  return null;
}

/** Queues are not modeled locally; callers must handle the null. */
@Override
public QueueInfo[] getRootQueues() throws IOException {
  return null;
}

/** Queues are not modeled locally; callers must handle the null. */
@Override
public QueueInfo[] getQueues() throws IOException {
  return null;
}

/** Queues are not modeled locally; callers must handle the null. */
@Override
public QueueInfo getQueue(String queue) throws IOException {
  return null;
}

/** Queue ACLs are not modeled locally; callers must handle the null. */
@Override
public org.apache.hadoop.mapreduce.QueueAclsInfo[]
    getQueueAclsForCurrentUser() throws IOException{
  return null;
}
/**
 * Set the max number of map tasks to run concurrently in the LocalJobRunner.
 * Stored under {@code LOCAL_MAX_MAPS}; consumed by createMapExecutor().
 * @param job the job to configure
 * @param maxMaps the maximum number of map tasks to allow.
 */
public static void setLocalMaxRunningMaps(
    org.apache.hadoop.mapreduce.JobContext job,
    int maxMaps) {
  job.getConfiguration().setInt(LOCAL_MAX_MAPS, maxMaps);
}

/**
 * @return the max number of map tasks to run concurrently in the
 * LocalJobRunner. Defaults to 1 when unset.
 */
public static int getLocalMaxRunningMaps(
    org.apache.hadoop.mapreduce.JobContext job) {
  return job.getConfiguration().getInt(LOCAL_MAX_MAPS, 1);
}


/**
 * Set the max number of reduce tasks to run concurrently in the LocalJobRunner.
 * Stored under {@code LOCAL_MAX_REDUCES}; consumed by createReduceExecutor().
 * @param job the job to configure
 * @param maxReduces the maximum number of reduce tasks to allow.
 */
public static void setLocalMaxRunningReduces(
    org.apache.hadoop.mapreduce.JobContext job,
    int maxReduces) {
  job.getConfiguration().setInt(LOCAL_MAX_REDUCES, maxReduces);
}

/**
 * @return the max number of reduce tasks to run concurrently in the
 * LocalJobRunner. Defaults to 1 when unset.
 */
public static int getLocalMaxRunningReduces(
    org.apache.hadoop.mapreduce.JobContext job) {
  return job.getConfiguration().getInt(LOCAL_MAX_REDUCES, 1);
}
/** Delegation tokens are meaningless without a cluster; no-op. */
@Override
public void cancelDelegationToken(Token<DelegationTokenIdentifier> token
                                      ) throws IOException,
                                               InterruptedException {
}

/** Delegation tokens are meaningless without a cluster; always null. */
@Override
public Token<DelegationTokenIdentifier>
    getDelegationToken(Text renewer) throws IOException, InterruptedException {
  return null;
}

/** Delegation tokens are meaningless without a cluster; returns 0. */
@Override
public long renewDelegationToken(Token<DelegationTokenIdentifier> token
                                     ) throws IOException,InterruptedException{
  return 0;
}

/** Aggregated logs do not exist locally. */
@Override
public LogParams getLogFileParams(org.apache.hadoop.mapreduce.JobID jobID,
    org.apache.hadoop.mapreduce.TaskAttemptID taskAttemptID)
    throws IOException, InterruptedException {
  throw new UnsupportedOperationException("Not supported");
}
/**
 * Rewrites {@code MRConfig.LOCAL_DIR} in the child task's configuration
 * so every configured local dir points at this attempt's private
 * task directory (see getLocalTaskDir for the layout).
 *
 * Fixes: the per-dir suffix is loop-invariant and is now computed once
 * instead of once per local dir, and the accumulator is a StringBuilder
 * rather than a needlessly synchronized StringBuffer.
 *
 * @param t the task whose job/task/user ids shape the directory path
 * @param conf the child configuration to update in place
 */
static void setupChildMapredLocalDirs(Task t, JobConf conf) {
  String[] localDirs = conf.getTrimmedStrings(MRConfig.LOCAL_DIR);
  String jobId = t.getJobID().toString();
  String taskId = t.getTaskID().toString();
  boolean isCleanup = t.isTaskCleanupTask();
  String user = t.getUser();
  // Loop-invariant: the task-relative subdirectory is identical for
  // every configured local dir.
  String taskDirSuffix = getLocalTaskDir(user, jobId, taskId, isCleanup);
  StringBuilder childMapredLocalDir =
      new StringBuilder(localDirs[0] + Path.SEPARATOR + taskDirSuffix);
  for (int i = 1; i < localDirs.length; i++) {
    childMapredLocalDir.append(",").append(localDirs[i])
        .append(Path.SEPARATOR).append(taskDirSuffix);
  }
  LOG.debug(MRConfig.LOCAL_DIR + " for child : " + childMapredLocalDir);
  conf.set(MRConfig.LOCAL_DIR, childMapredLocalDir.toString());
}
/** Suffix appended to a task dir when the attempt is a cleanup attempt. */
static final String TASK_CLEANUP_SUFFIX = ".cleanup";
/** Subdirectory under the per-user dir that holds per-job caches. */
static final String JOBCACHE = "jobcache";

/**
 * Builds the local working directory for one task attempt:
 * {@code jobDir/user/jobcache/jobid/taskid[.cleanup]}.
 */
static String getLocalTaskDir(String user, String jobid, String taskid,
    boolean isCleanupAttempt) {
  StringBuilder dir = new StringBuilder(jobDir);
  dir.append(Path.SEPARATOR).append(user)
     .append(Path.SEPARATOR).append(JOBCACHE)
     .append(Path.SEPARATOR).append(jobid)
     .append(Path.SEPARATOR).append(taskid);
  if (isCleanupAttempt) {
    dir.append(TASK_CLEANUP_SUFFIX);
  }
  return dir.toString();
}
}
|
apache/incubator-atlas | 35,990 | repository/src/main/java/org/apache/atlas/services/DefaultMetadataService.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.services;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.AtlasErrorCode;
import org.apache.atlas.AtlasException;
import org.apache.atlas.CreateUpdateEntitiesResult;
import org.apache.atlas.EntityAuditEvent;
import org.apache.atlas.RequestContext;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.ha.HAConfiguration;
import org.apache.atlas.listener.ActiveStateChangeHandler;
import org.apache.atlas.listener.ChangedTypeDefs;
import org.apache.atlas.listener.EntityChangeListener;
import org.apache.atlas.listener.TypeDefChangeListener;
import org.apache.atlas.listener.TypesChangeListener;
import org.apache.atlas.model.legacy.EntityResult;
import org.apache.atlas.repository.MetadataRepository;
import org.apache.atlas.repository.RepositoryException;
import org.apache.atlas.repository.audit.EntityAuditRepository;
import org.apache.atlas.repository.graph.GraphHelper;
import org.apache.atlas.repository.typestore.ITypeStore;
import org.apache.atlas.type.AtlasTypeUtil;
import org.apache.atlas.typesystem.IReferenceableInstance;
import org.apache.atlas.typesystem.IStruct;
import org.apache.atlas.typesystem.ITypedReferenceableInstance;
import org.apache.atlas.typesystem.ITypedStruct;
import org.apache.atlas.typesystem.Referenceable;
import org.apache.atlas.typesystem.Struct;
import org.apache.atlas.typesystem.TypesDef;
import org.apache.atlas.typesystem.exception.EntityNotFoundException;
import org.apache.atlas.typesystem.exception.TypeNotFoundException;
import org.apache.atlas.typesystem.json.InstanceSerialization;
import org.apache.atlas.typesystem.json.TypesSerialization;
import org.apache.atlas.typesystem.persistence.Id;
import org.apache.atlas.typesystem.persistence.ReferenceableInstance;
import org.apache.atlas.typesystem.types.*;
import org.apache.atlas.typesystem.types.cache.TypeCache;
import org.apache.atlas.utils.ParamChecker;
import org.apache.commons.configuration.Configuration;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Simple wrapper over TypeSystem and MetadataRepository services with hooks
* for listening to changes to the repository.
*/
@Singleton
@Component
@Deprecated
public class DefaultMetadataService implements MetadataService, ActiveStateChangeHandler, TypeDefChangeListener {
    // Labels the kind of type-definition request; used only to build
    // error messages in validateTypeDefinition().
    private enum OperationType {
        CREATE, UPDATE, DELETE
    };
    private static final Logger LOG = LoggerFactory.getLogger(DefaultMetadataService.class);

    // Cap on the number of audit events returned per query; configurable.
    private final short maxAuditResults;
    private static final String CONFIG_MAX_AUDIT_RESULTS = "atlas.audit.maxResults";
    private static final short DEFAULT_MAX_AUDIT_RESULTS = 1000;

    private final TypeSystem typeSystem;
    private final MetadataRepository repository;
    private final ITypeStore typeStore;

    // LinkedHashSet: listeners fire in registration order, no duplicates.
    private final Collection<TypesChangeListener> typeChangeListeners = new LinkedHashSet<>();
    private final Collection<EntityChangeListener> entityChangeListeners = new LinkedHashSet<>();

    private EntityAuditRepository auditRepository;
    /**
     * Wires the metadata service: installs the injected type cache into the
     * TypeSystem singleton, registers listeners, and (unless HA is enabled,
     * in which case restore happens on active-state transition) restores the
     * persisted type system from the type store.
     */
    @Inject
    public DefaultMetadataService(final MetadataRepository repository, final ITypeStore typeStore,
                                  final Set<TypesChangeListener> typesChangeListeners,
                                  final Set<EntityChangeListener> entityChangeListeners,
                                  final TypeSystem typeSystem,
                                  final Configuration configuration,
                                  TypeCache typeCache,
                                  EntityAuditRepository auditRepository) throws AtlasException {
        this.typeStore = typeStore;
        this.typeSystem = typeSystem;
        /**
         * Ideally a TypeCache implementation should have been injected in the TypeSystemProvider,
         * but a singleton of TypeSystem is constructed privately within the class so that
         * clients of TypeSystem would never instantiate a TypeSystem object directly in
         * their code. As soon as a client makes a call to TypeSystem.getInstance(), they
         * should have the singleton ready for consumption. Manually inject TypeSystem with
         * the Guice-instantiated type cache here, before types are restored.
         * This allows cache implementations to participate in Guice dependency injection.
         */
        this.typeSystem.setTypeCache(typeCache);

        this.repository = repository;

        this.typeChangeListeners.addAll(typesChangeListeners);

        this.entityChangeListeners.addAll(entityChangeListeners);

        // Under HA, restore is deferred until this instance becomes active.
        if (!HAConfiguration.isHAEnabled(configuration)) {
            restoreTypeSystem();
        }

        maxAuditResults = configuration.getShort(CONFIG_MAX_AUDIT_RESULTS, DEFAULT_MAX_AUDIT_RESULTS);

        this.auditRepository = auditRepository;
    }
    /** Loads all persisted type definitions from the store into the cache. */
    private void restoreTypeSystem() throws AtlasException {
        LOG.info("Restoring type system from the store");
        TypesDef typesDef = typeStore.restore();

        refreshCache(typesDef);

        LOG.info("Restored type system from the store");
    }

    /**
     * Commits the given type definitions into the live TypeSystem via a
     * transient type system (isUpdate=true so redefinitions are accepted).
     * No-op for a null or empty definition set.
     */
    private void refreshCache(TypesDef typesDef) throws AtlasException {
        if (typesDef != null && !typesDef.isEmpty()) {
            TypeSystem.TransientTypeSystem transientTypeSystem = typeSystem.createTransientTypeSystem(typesDef, true);
            Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
            LOG.info("Number of types got from transient type system: {}", typesAdded.size());
            typeSystem.commitTypes(typesAdded);
        }
    }
    /**
     * Creates a new type based on the type system to enable adding
     * entities (instances for types).
     * Delegates to createOrUpdateTypes with isUpdate=false, so existing
     * type names must not be redefined.
     *
     * @param typeDefinition definition as json
     * @return a unique id for this type
     */
    @Override
    public JSONObject createType(String typeDefinition) throws AtlasException {
        return createOrUpdateTypes(OperationType.CREATE, typeDefinition, false);
    }
private JSONObject createOrUpdateTypes(OperationType opType, String typeDefinition, boolean isUpdate) throws AtlasException {
typeDefinition = ParamChecker.notEmpty(typeDefinition, "type definition");
TypesDef typesDef = validateTypeDefinition(opType, typeDefinition);
try {
final TypeSystem.TransientTypeSystem transientTypeSystem = typeSystem.createTransientTypeSystem(typesDef, isUpdate);
final Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
try {
/* Create indexes first so that if index creation fails then we rollback
the typesystem and also do not persist the graph
*/
if (isUpdate) {
onTypesUpdated(typesAdded);
} else {
onTypesAdded(typesAdded);
}
typeStore.store(transientTypeSystem, ImmutableList.copyOf(typesAdded.keySet()));
typeSystem.commitTypes(typesAdded);
} catch (Throwable t) {
throw new AtlasException("Unable to persist types ", t);
}
return new JSONObject() {{
put(AtlasClient.TYPES, typesAdded.keySet());
}};
} catch (JSONException e) {
LOG.error("Unable to create response for types={}", typeDefinition, e);
throw new AtlasException("Unable to create response ", e);
}
}
    /**
     * Updates existing type definitions; delegates to createOrUpdateTypes
     * with isUpdate=true so redefinitions of known names are accepted.
     *
     * @param typeDefinition definition as json
     * @return JSON object listing the names of the types updated
     */
    @Override
    public JSONObject updateType(String typeDefinition) throws AtlasException {
        return createOrUpdateTypes(OperationType.UPDATE, typeDefinition, true);
    }
    /**
     * Parses the JSON type definition and checks every declared type name
     * against the naming rules (traits have their own rule).
     *
     * NOTE(review): the broad catch(Exception) at the bottom also catches
     * the AtlasExceptions thrown by the name checks above and rewraps them
     * as IllegalArgumentException with a misleading "Unable to deserialize"
     * message. Narrowing it would change the exception type callers see,
     * so it is only flagged here — confirm before altering.
     *
     * @param opType operation label interpolated into error messages
     * @param typeDefinition JSON types definition
     * @return the parsed TypesDef
     * @throws IllegalArgumentException on parse failure or invalid names
     */
    private TypesDef validateTypeDefinition(OperationType opType, String typeDefinition) throws AtlasException {
        final String exceptionErrorMessageFormat = "%s for '%s' failed: %s";
        try {
            TypesDef typesDef = TypesSerialization.fromJson(typeDefinition);
            if (typesDef.isEmpty()) {
                throw new IllegalArgumentException("Invalid type definition");
            }

            for (HierarchicalTypeDefinition<ClassType> t : typesDef.classTypesAsJavaList()) {
                if (!AtlasTypeUtil.isValidTypeName(t.typeName))
                    throw new AtlasException(
                            String.format(exceptionErrorMessageFormat, opType.toString(), t.typeName, AtlasTypeUtil.getInvalidTypeNameErrorMessage()));
            }

            for (StructTypeDefinition t : typesDef.structTypesAsJavaList()) {
                if (!AtlasTypeUtil.isValidTypeName(t.typeName))
                    throw new AtlasException(
                            String.format(exceptionErrorMessageFormat, opType.toString(), t.typeName, AtlasTypeUtil.getInvalidTypeNameErrorMessage()));
            }

            for (EnumTypeDefinition t : typesDef.enumTypesAsJavaList()) {
                if (!AtlasTypeUtil.isValidTypeName(t.name))
                    throw new AtlasException(
                            String.format(exceptionErrorMessageFormat, opType.toString(), t.name, AtlasTypeUtil.getInvalidTypeNameErrorMessage()));
            }

            // Trait names follow a distinct validation rule.
            for (HierarchicalTypeDefinition<TraitType> t : typesDef.traitTypesAsJavaList()) {
                if (!AtlasTypeUtil.isValidTraitTypeName(t.typeName))
                    throw new AtlasException(
                            String.format(exceptionErrorMessageFormat, opType.toString(), t.typeName, AtlasTypeUtil.getInvalidTraitTypeNameErrorMessage()));
            }

            return typesDef;
        }
        catch (Exception e) {
            LOG.error("Unable to deserialize json={}", typeDefinition, e);
            throw new IllegalArgumentException("Unable to deserialize json " + typeDefinition, e);
        }
    }
/**
 * Returns the JSON definition of the named type.
 *
 * <p>Resolving the type first ensures an unknown name fails with a type-system
 * error before any serialization is attempted.
 *
 * @param typeName name of the type; must be registered in the type system
 * @return type definition as JSON
 * @throws AtlasException if the type is not known to the type system
 */
@Override
public String getTypeDefinition(String typeName) throws AtlasException {
    final String resolvedName = typeSystem.getDataType(IDataType.class, typeName).getName();
    return TypesSerialization.toJson(typeSystem, resolvedName);
}
/**
 * Return the list of type names in the type system which match the specified filter.
 *
 * @return list of type names
 * @param filterMap - Map of filter for type names. Valid keys are CATEGORY, SUPERTYPE, NOT_SUPERTYPE
 * For example, CATEGORY = TRAIT && SUPERTYPE contains 'X' && SUPERTYPE !contains 'Y'
 * If there is no filter, all the types are returned
 */
@Override
public List<String> getTypeNames(Map<TypeCache.TYPE_FILTER, String> filterMap) throws AtlasException {
// Pure delegation; filtering semantics live in the type system / TypeCache.
return typeSystem.getTypeNames(filterMap);
}
/**
 * Creates entities from a JSON array of entity definitions.
 *
 * @param entityInstanceDefinition json array of entity definitions; must be non-empty
 * @return result carrying the guids of the created entities
 * @throws AtlasException if deserialization or creation fails
 */
@Override
public CreateUpdateEntitiesResult createEntities(String entityInstanceDefinition) throws AtlasException {
    final String definitionJson =
            ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition");
    return createEntities(deserializeClassInstances(definitionJson));
}
/**
 * Creates the given typed instances in the repository and notifies registered
 * entity-change listeners of the newly created entities.
 *
 * @param typedInstances instances to persist
 * @return repository result; its created-entities list drives listener callbacks
 * @throws AtlasException on repository or listener failure
 */
public CreateUpdateEntitiesResult createEntities(ITypedReferenceableInstance[] typedInstances) throws AtlasException {
final CreateUpdateEntitiesResult result = repository.createEntities(typedInstances);
onEntitiesAdded(result.getCreatedEntities());
return result;
}
/**
 * Deserializes a JSON array of entity definitions into typed instances,
 * delegating to {@code GraphHelper} with this service's type system.
 */
@Override
public ITypedReferenceableInstance[] deserializeClassInstances(String entityInstanceDefinition) throws AtlasException {
return GraphHelper.deserializeClassInstances(typeSystem, entityInstanceDefinition);
}
/**
 * Converts an untyped {@link Referenceable} into a typed instance,
 * delegating to {@code GraphHelper} with this service's type system.
 */
@Override
public ITypedReferenceableInstance getTypedReferenceableInstance(Referenceable entityInstance) throws AtlasException {
return GraphHelper.getTypedReferenceableInstance(typeSystem, entityInstance);
}
/**
 * Returns the serialized (JSON) definition of the entity with the given guid.
 *
 * @param guid globally unique entity id; must be non-empty
 * @return entity definition as JSON
 * @throws AtlasException if the entity cannot be loaded
 */
@Override
public String getEntityDefinitionJson(String guid) throws AtlasException {
    final String entityId = ParamChecker.notEmpty(guid, "entity id");
    return InstanceSerialization.toJson(repository.getEntityDefinition(entityId), true);
}
/**
 * Returns the typed definition of the entity with the given guid.
 *
 * @param guid globally unique entity id; must be non-empty
 * @return the typed entity instance from the repository
 * @throws AtlasException if the entity cannot be loaded
 */
@Override
public ITypedReferenceableInstance getEntityDefinition(String guid) throws AtlasException {
    return repository.getEntityDefinition(ParamChecker.notEmpty(guid, "entity id"));
}
/**
 * Looks up an entity by a unique attribute value.
 *
 * <p>Validates that the type exists and is a CLASS type, and that the attribute
 * is declared unique, before querying the repository.
 *
 * @param entityType name of a CLASS type
 * @param attribute  name of a unique attribute of that type
 * @param value      attribute value to match
 * @return the matching typed instance
 */
@Override
public ITypedReferenceableInstance getEntityDefinitionReference(String entityType, String attribute, String value)
throws AtlasException {
validateTypeExists(entityType);
validateUniqueAttribute(entityType, attribute);
return repository.getEntityDefinition(entityType, attribute, value);
}
/**
 * JSON variant of {@code getEntityDefinitionReference}: looks the entity up by a
 * unique attribute value and returns its serialized definition.
 */
@Override
public String getEntityDefinition(String entityType, String attribute, String value) throws AtlasException {
final ITypedReferenceableInstance instance = getEntityDefinitionReference(entityType, attribute, value);
return InstanceSerialization.toJson(instance, true);
}
/**
 * Validates that the named attribute exists on the given class type and is
 * declared unique.
 *
 * @param entityType    the entity (class) type name
 * @param attributeName the name of the attribute
 * @throws IllegalArgumentException if the attribute is missing or not unique
 * @throws AtlasException           if the type cannot be resolved
 */
private void validateUniqueAttribute(String entityType, String attributeName) throws AtlasException {
    ClassType classType = typeSystem.getDataType(ClassType.class, entityType);
    AttributeInfo attributeInfo = classType.fieldMapping().fields.get(attributeName);

    if (attributeInfo == null) {
        throw new IllegalArgumentException(
            String.format("%s is not an attribute in %s", attributeName, entityType));
    }
    if (!attributeInfo.isUnique) {
        throw new IllegalArgumentException(
            String.format("%s.%s is not a unique attribute", entityType, attributeName));
    }
}
/**
 * Return the list of entity guids for the given type in the repository.
 *
 * @param entityType type; must be a registered CLASS type
 * @return list of entity guids for the given type in the repository
 */
@Override
public List<String> getEntityList(String entityType) throws AtlasException {
validateTypeExists(entityType);
return repository.getEntityList(entityType);
}
/**
 * Full update of entities from a JSON array of entity definitions (entities are
 * matched by the guids set on the instances).
 *
 * @param entityInstanceDefinition json array of entity definitions; must be non-empty
 * @return repository result; created/updated/deleted guids trigger listener callbacks
 * @throws AtlasException if deserialization or the update fails
 */
@Override
public CreateUpdateEntitiesResult updateEntities(String entityInstanceDefinition) throws AtlasException {
    final String definitionJson =
            ParamChecker.notEmpty(entityInstanceDefinition, "Entity instance definition");
    final CreateUpdateEntitiesResult result =
            repository.updateEntities(deserializeClassInstances(definitionJson));
    onEntitiesAddedUpdated(result.getEntityResult());
    return result;
}
/**
 * Updates an entity, instance of the type based on the guid set.
 *
 * @param entityInstanceDefinitions typed instances to update
 * @return repository result; created/updated/deleted guids trigger listener callbacks
 */
@Override
public CreateUpdateEntitiesResult updateEntities(ITypedReferenceableInstance[] entityInstanceDefinitions) throws AtlasException {
CreateUpdateEntitiesResult result = repository.updateEntities(entityInstanceDefinitions);
onEntitiesAddedUpdated(result.getEntityResult());
return result;
}
/**
 * Fans an update result out to the registered entity-change listeners:
 * created guids, then updated guids, then deletions.
 *
 * <p>Deleted entities are intentionally taken from the thread-local
 * {@link RequestContext} rather than from {@code entityResult} — the context
 * holds the full typed instances needed by the listeners, not just guids.
 */
private void onEntitiesAddedUpdated(EntityResult entityResult) throws AtlasException {
onEntitiesAdded(entityResult.getCreatedEntities());
onEntitiesUpdated(entityResult.getUpdateEntities());
//Note: doesn't access deletedEntities from entityResult
onEntitiesDeleted(RequestContext.get().getDeletedEntities());
}
/**
 * Partially updates a single attribute of the entity identified by guid.
 *
 * <p>Only PRIMITIVE and CLASS attribute categories are supported; for a CLASS
 * attribute the string value is interpreted as the guid of the referenced
 * entity and wrapped in an {@link Id}. A fresh, mostly-empty instance carrying
 * only the changed attribute is built and pushed through
 * {@code repository.updatePartial}.
 *
 * @param guid          entity id; must be non-empty
 * @param attributeName attribute to change; must exist on the entity's type
 * @param value         new value (guid string for CLASS attributes)
 * @return repository result; triggers add/update/delete listener callbacks
 * @throws AtlasException if the attribute is unknown or its category unsupported
 */
@Override
public CreateUpdateEntitiesResult updateEntityAttributeByGuid(String guid, String attributeName,
String value) throws AtlasException {
guid = ParamChecker.notEmpty(guid, "entity id");
attributeName = ParamChecker.notEmpty(attributeName, "attribute name");
value = ParamChecker.notEmpty(value, "attribute value");
// Fails fast with EntityNotFoundException; also yields the concrete type name.
ITypedReferenceableInstance existInstance = validateEntityExists(guid);
ClassType type = typeSystem.getDataType(ClassType.class, existInstance.getTypeName());
ITypedReferenceableInstance newInstance = type.createInstance();
// NOTE(review): direct field access here vs. fieldMapping().fields in
// validateUniqueAttribute — presumably equivalent; confirm against ClassType.
AttributeInfo attributeInfo = type.fieldMapping.fields.get(attributeName);
if (attributeInfo == null) {
throw new AtlasException("Invalid property " + attributeName + " for entity " + existInstance.getTypeName());
}
DataTypes.TypeCategory attrTypeCategory = attributeInfo.dataType().getTypeCategory();
switch(attrTypeCategory) {
case PRIMITIVE:
newInstance.set(attributeName, value);
break;
case CLASS:
// The value is treated as the referenced entity's guid.
Id id = new Id(value, 0, attributeInfo.dataType().getName());
newInstance.set(attributeName, id);
break;
default:
throw new AtlasException("Update of " + attrTypeCategory + " is not supported");
}
// Stamp the target guid on the sparse instance so the repository updates in place.
((ReferenceableInstance)newInstance).replaceWithNewId(new Id(guid, 0, newInstance.getTypeName()));
CreateUpdateEntitiesResult result = repository.updatePartial(newInstance);
onEntitiesAddedUpdated(result.getEntityResult());
return result;
}
/**
 * Loads the entity for the given guid, failing with {@link EntityNotFoundException}
 * if the repository returns {@code null}.
 *
 * @param guid entity id
 * @return the existing typed instance (never {@code null})
 */
private ITypedReferenceableInstance validateEntityExists(String guid)
throws EntityNotFoundException, RepositoryException {
final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
if (instance == null) {
throw new EntityNotFoundException(String.format("Entity with guid %s not found ", guid));
}
return instance;
}
/**
 * Partially updates the entity identified by guid with the attributes present
 * on {@code newEntity}; attributes absent from {@code newEntity} are untouched.
 *
 * @param guid      entity id; must be non-empty
 * @param newEntity partial entity carrying only the attributes to change
 * @return repository result; triggers add/update/delete listener callbacks
 */
@Override
public CreateUpdateEntitiesResult updateEntityPartialByGuid(String guid, Referenceable newEntity)
throws AtlasException {
// NOTE(review): the second argument is the *parameter name* used in the
// ParamChecker message; "guid cannot be null" reads oddly there — verify.
guid = ParamChecker.notEmpty(guid, "guid cannot be null");
newEntity = ParamChecker.notNull(newEntity, "updatedEntity cannot be null");
ITypedReferenceableInstance existInstance = validateEntityExists(guid);
ITypedReferenceableInstance newInstance = validateAndConvertToTypedInstance(newEntity, existInstance.getTypeName());
// Stamp the target guid so the repository applies the partial update in place.
((ReferenceableInstance)newInstance).replaceWithNewId(new Id(guid, 0, newInstance.getTypeName()));
CreateUpdateEntitiesResult result = repository.updatePartial(newInstance);
onEntitiesAddedUpdated(result.getEntityResult());
return result;
}
/**
 * Converts an untyped (partial) entity into a typed instance of the given type,
 * validating each supplied attribute against the type's field mapping.
 *
 * <p>Per attribute category:
 * <ul>
 *   <li>CLASS — a {@link Referenceable} value is copied as-is; a string value is
 *       interpreted as the referenced entity's guid and wrapped in an {@link Id};
 *       {@code null} values are skipped (attribute left unset);</li>
 *   <li>ENUM / PRIMITIVE / ARRAY / STRUCT / MAP — copied verbatim;</li>
 *   <li>TRAIT and anything else — rejected (trait updates unimplemented, see TODO).</li>
 * </ul>
 *
 * @param updatedEntity partial entity whose value map drives the conversion
 * @param typeName      target class type name
 * @return a typed instance carrying the supplied attributes and the source entity's id
 * @throws AtlasException for unknown attributes or unsupported categories
 */
@Override
public ITypedReferenceableInstance validateAndConvertToTypedInstance(IReferenceableInstance updatedEntity, String typeName)
throws AtlasException {
ClassType type = typeSystem.getDataType(ClassType.class, typeName);
ITypedReferenceableInstance newInstance = type.createInstance(updatedEntity.getId());
for (String attributeName : updatedEntity.getValuesMap().keySet()) {
AttributeInfo attributeInfo = type.fieldMapping.fields.get(attributeName);
if (attributeInfo == null) {
throw new AtlasException("Invalid property " + attributeName + " for entity " + updatedEntity);
}
DataTypes.TypeCategory attrTypeCategory = attributeInfo.dataType().getTypeCategory();
Object value = updatedEntity.get(attributeName);
switch (attrTypeCategory) {
case CLASS:
if (value != null) {
if (value instanceof Referenceable) {
newInstance.set(attributeName, value);
} else {
// Non-Referenceable value: treat it as the referenced entity's guid.
Id id = new Id((String) value, 0, attributeInfo.dataType().getName());
newInstance.set(attributeName, id);
}
}
break;
case ENUM:
case PRIMITIVE:
case ARRAY:
case STRUCT:
case MAP:
newInstance.set(attributeName, value);
break;
case TRAIT:
//TODO - handle trait updates as well?
default:
throw new AtlasException("Update of " + attrTypeCategory + " is not supported");
}
}
return newInstance;
}
/**
 * Partially updates the entity located by a unique attribute value.
 *
 * <p>Resolves the existing entity via {@code getEntityDefinitionReference}
 * (which throws if no match exists), converts the supplied partial entity to a
 * typed instance, re-stamps it with the existing entity's id, and applies a
 * partial update.
 *
 * @param typeName            CLASS type of the entity
 * @param uniqueAttributeName unique attribute used for the lookup
 * @param attrValue           value of the unique attribute
 * @param updatedEntity       partial entity carrying only attributes to change
 * @return repository result; triggers add/update/delete listener callbacks
 */
@Override
public CreateUpdateEntitiesResult updateEntityByUniqueAttribute(String typeName, String uniqueAttributeName,
String attrValue,
Referenceable updatedEntity) throws AtlasException {
typeName = ParamChecker.notEmpty(typeName, "typeName");
uniqueAttributeName = ParamChecker.notEmpty(uniqueAttributeName, "uniqueAttributeName");
attrValue = ParamChecker.notNull(attrValue, "unique attribute value");
updatedEntity = ParamChecker.notNull(updatedEntity, "updatedEntity");
ITypedReferenceableInstance oldInstance = getEntityDefinitionReference(typeName, uniqueAttributeName, attrValue);
final ITypedReferenceableInstance newInstance = validateAndConvertToTypedInstance(updatedEntity, typeName);
// Carry over the existing entity's id so the repository updates in place.
((ReferenceableInstance)newInstance).replaceWithNewId(oldInstance.getId());
CreateUpdateEntitiesResult result = repository.updatePartial(newInstance);
onEntitiesAddedUpdated(result.getEntityResult());
return result;
}
/**
 * Validates that the given name denotes a registered CLASS type.
 *
 * @param entityType type name; must be non-empty
 * @throws IllegalArgumentException if the type exists but is not a CLASS type
 * @throws AtlasException           if the type is not registered
 */
private void validateTypeExists(String entityType) throws AtlasException {
    final String typeName = ParamChecker.notEmpty(entityType, "entity type");
    IDataType dataType = typeSystem.getDataType(IDataType.class, typeName);
    if (dataType.getTypeCategory() != DataTypes.TypeCategory.CLASS) {
        throw new IllegalArgumentException("type " + typeName + " not a CLASS type");
    }
}
/**
 * Gets the list of trait names for a given entity represented by a guid.
 *
 * @param guid globally unique identifier for the entity; must be non-empty
 * @return a list of trait names for the given entity guid
 * @throws AtlasException
 */
@Override
public List<String> getTraitNames(String guid) throws AtlasException {
guid = ParamChecker.notEmpty(guid, "entity id");
return repository.getTraitNames(guid);
}
/**
 * Adds a new trait to the list of existing entities represented by their respective guids.
 *
 * <p>Preconditions checked up front: the trait type must already be registered
 * in the type system, and none of the target entities may already carry the
 * trait. Only after all checks pass is the repository updated and each
 * entity-change listener notified per entity.
 *
 * @param entityGuids list of guids of entities
 * @param traitInstance trait instance that needs to be added to entities
 * @throws AtlasException
 */
@Override
public void addTrait(List<String> entityGuids, ITypedStruct traitInstance) throws AtlasException {
Preconditions.checkNotNull(entityGuids, "entityGuids list cannot be null");
Preconditions.checkNotNull(traitInstance, "Trait instance cannot be null");
final String traitName = traitInstance.getTypeName();
// ensure trait type is already registered with the TypeSystem
if (!typeSystem.isRegistered(traitName)) {
String msg = String.format("trait=%s should be defined in type system before it can be added", traitName);
LOG.error(msg);
throw new TypeNotFoundException(msg);
}
//ensure trait is not already registered with any of the given entities
for (String entityGuid : entityGuids) {
Preconditions.checkArgument(!getTraitNames(entityGuid).contains(traitName),
"trait=%s is already defined for entity=%s", traitName, entityGuid);
}
repository.addTrait(entityGuids, traitInstance);
// Notify listeners with each entity's post-update definition.
for (String entityGuid : entityGuids) {
onTraitAddedToEntity(repository.getEntityDefinition(entityGuid), traitInstance);
}
}
/**
 * Adds a new trait to an existing entity represented by a guid.
 *
 * @param guid globally unique identifier for the entity
 * @param traitInstanceDefinition trait instance json that needs to be added to entity
 * @throws AtlasException
 */
@Override
public void addTrait(String guid, String traitInstanceDefinition) throws AtlasException {
guid = ParamChecker.notEmpty(guid, "entity id");
traitInstanceDefinition = ParamChecker.notEmpty(traitInstanceDefinition, "trait instance definition");
// Parse the JSON into a typed struct, then reuse the typed-struct overload.
ITypedStruct traitInstance = deserializeTraitInstance(traitInstanceDefinition);
addTrait(guid, traitInstance);
}
/**
 * Adds a trait instance to a single entity.
 *
 * <p>Single-entity counterpart of {@code addTrait(List, ITypedStruct)}: the
 * trait type must already be registered and the entity must not already carry
 * the trait; on success the repository is updated and listeners are notified
 * with the entity's post-update definition.
 *
 * @param guid          entity id
 * @param traitInstance typed trait instance to attach
 */
public void addTrait(String guid, ITypedStruct traitInstance) throws AtlasException {
final String traitName = traitInstance.getTypeName();
// ensure trait type is already registered with the TS
if (!typeSystem.isRegistered(traitName)) {
String msg = String.format("trait=%s should be defined in type system before it can be added", traitName);
LOG.error(msg);
throw new TypeNotFoundException(msg);
}
// ensure trait is not already defined
Preconditions
.checkArgument(!getTraitNames(guid).contains(traitName), "trait=%s is already defined for entity=%s",
traitName, guid);
repository.addTrait(guid, traitInstance);
onTraitAddedToEntity(repository.getEntityDefinition(guid), traitInstance);
}
/**
 * Parses a trait-instance JSON payload and converts it to a typed struct via
 * {@code createTraitInstance}.
 */
private ITypedStruct deserializeTraitInstance(String traitInstanceDefinition)
throws AtlasException {
return createTraitInstance(InstanceSerialization.fromJsonStruct(traitInstanceDefinition, true));
}
/**
 * Converts an untyped trait {@link Struct} into a typed trait instance.
 *
 * <p>{@link TypeNotFoundException} (unknown trait type) is rethrown untouched;
 * any other failure is wrapped in a generic {@link AtlasException}.
 *
 * @param traitInstance untyped trait struct; its type name must be non-empty
 * @return the typed trait instance
 */
@Override
public ITypedStruct createTraitInstance(Struct traitInstance) throws AtlasException {
    try {
        // Despite the message key, this is the *trait* type name.
        String traitTypeName = ParamChecker.notEmpty(traitInstance.getTypeName(), "entity type");
        return typeSystem.getDataType(TraitType.class, traitTypeName)
                .convert(traitInstance, Multiplicity.REQUIRED);
    } catch (TypeNotFoundException e) {
        throw e;
    } catch (Exception e) {
        throw new AtlasException("Error deserializing trait instance", e);
    }
}
/**
 * Returns the named trait attached to the entity with the given guid, as
 * exposed by the entity's own definition ({@code instance.getTrait}).
 */
@Override
public IStruct getTraitDefinition(String guid, final String traitName) throws AtlasException {
guid = ParamChecker.notEmpty(guid, "entity id");
final ITypedReferenceableInstance instance = repository.getEntityDefinition(guid);
return instance.getTrait(traitName);
}
/**
 * Deletes a given trait from an existing entity represented by a guid.
 *
 * <p>The trait type must still be registered in the type system; the repository
 * is updated first and listeners are then notified with the entity's
 * post-deletion definition.
 *
 * @param guid globally unique identifier for the entity
 * @param traitNameToBeDeleted name of the trait
 * @throws AtlasException
 */
@Override
public void deleteTrait(String guid, String traitNameToBeDeleted) throws AtlasException {
guid = ParamChecker.notEmpty(guid, "entity id");
traitNameToBeDeleted = ParamChecker.notEmpty(traitNameToBeDeleted, "trait name");
// ensure trait type is already registered with the TS
if (!typeSystem.isRegistered(traitNameToBeDeleted)) {
final String msg = String.format("trait=%s should be defined in type system before it can be deleted",
traitNameToBeDeleted);
LOG.error(msg);
throw new TypeNotFoundException(msg);
}
repository.deleteTrait(guid, traitNameToBeDeleted);
onTraitDeletedFromEntity(repository.getEntityDefinition(guid), traitNameToBeDeleted);
}
/**
 * Notifies every registered types-change listener of newly added types.
 *
 * @param typesAdded map of type name to the added type
 */
private void onTypesAdded(Map<String, IDataType> typesAdded) throws AtlasException {
    Collection<IDataType> addedTypes = typesAdded.values();
    for (TypesChangeListener listener : typeChangeListeners) {
        listener.onAdd(addedTypes);
    }
}
/**
 * Loads the full definitions for the given guids and notifies every registered
 * entity-change listener that they were added.
 */
private void onEntitiesAdded(List<String> guids) throws AtlasException {
List<ITypedReferenceableInstance> entities = loadEntities(guids);
for (EntityChangeListener listener : entityChangeListeners) {
// Second flag: not an import — TODO confirm against EntityChangeListener.
listener.onEntitiesAdded(entities, false);
}
}
/**
 * Fetches the full entity definitions for the given guids from the repository.
 *
 * @param guids entity ids to load
 * @return typed instances in repository order
 */
private List<ITypedReferenceableInstance> loadEntities(List<String> guids) throws RepositoryException, EntityNotFoundException {
    // new String[0] is the idiomatic Collection.toArray form (and at least as
    // fast on modern JVMs as presizing the array).
    return repository.getEntityDefinitions(guids.toArray(new String[0]));
}
/**
 * Notifies every registered types-change listener of updated types.
 *
 * @param typesUpdated map of type name to the updated type
 */
private void onTypesUpdated(Map<String, IDataType> typesUpdated) throws AtlasException {
for (TypesChangeListener listener : typeChangeListeners) {
listener.onChange(typesUpdated.values());
}
}
/**
 * Loads the full definitions for the given guids and notifies every registered
 * entity-change listener that they were updated.
 */
private void onEntitiesUpdated(List<String> guids) throws AtlasException {
List<ITypedReferenceableInstance> entities = loadEntities(guids);
for (EntityChangeListener listener : entityChangeListeners) {
// Second flag: not an import — TODO confirm against EntityChangeListener.
listener.onEntitiesUpdated(entities, false);
}
}
/**
 * Notifies every registered entity-change listener that a single trait was
 * added to the given entity.
 */
private void onTraitAddedToEntity(ITypedReferenceableInstance entity, IStruct trait) throws AtlasException {
Collection<IStruct> traits = Collections.singletonList(trait);
for (EntityChangeListener listener : entityChangeListeners) {
listener.onTraitsAdded(entity, traits);
}
}
/**
 * Notifies every registered entity-change listener that a single trait was
 * deleted from the given entity.
 */
private void onTraitDeletedFromEntity(ITypedReferenceableInstance entity, String traitName) throws AtlasException {
Collection<String> traitNames = Collections.singletonList(traitName);
for (EntityChangeListener listener : entityChangeListeners) {
listener.onTraitsDeleted(entity, traitNames);
}
}
/** Registers an entity-change listener to receive add/update/delete/trait callbacks. */
public void registerListener(EntityChangeListener listener) {
entityChangeListeners.add(listener);
}
/** Removes a previously registered entity-change listener; no-op if not registered. */
public void unregisterListener(EntityChangeListener listener) {
entityChangeListeners.remove(listener);
}
/**
 * Lists audit events for an entity, paged from {@code startKey}.
 *
 * @param guid     entity id; must be non-empty
 * @param startKey pagination cursor; may be null, but must be non-empty if present
 * @param count    maximum events to return; must be less than {@code maxAuditResults}
 * @return events from the audit repository
 */
@Override
public List<EntityAuditEvent> getAuditEvents(String guid, String startKey, short count) throws AtlasException {
guid = ParamChecker.notEmpty(guid, "entity id");
startKey = ParamChecker.notEmptyIfNotNull(startKey, "start key");
ParamChecker.lessThan(count, maxAuditResults, "count");
return auditRepository.listEvents(guid, startKey, count);
}
/* (non-Javadoc)
 * @see org.apache.atlas.services.MetadataService#deleteEntities(java.lang.String)
 */
/**
 * Deletes the entities with the given guids.
 *
 * @param deleteCandidateGuids guids to delete; must be non-empty
 * @return result describing created/updated/deleted guids
 */
@Override
public EntityResult deleteEntities(List<String> deleteCandidateGuids) throws AtlasException {
ParamChecker.notEmpty(deleteCandidateGuids, "delete candidate guids");
return deleteGuids(deleteCandidateGuids);
}
/**
 * Deletes the single entity located by a unique attribute value.
 *
 * @param typeName            CLASS type of the entity
 * @param uniqueAttributeName unique attribute used for the lookup
 * @param attrValue           value of the unique attribute
 * @return result describing created/updated/deleted guids
 * @throws AtlasException if the entity cannot be found or the delete fails
 */
@Override
public EntityResult deleteEntityByUniqueAttribute(String typeName, String uniqueAttributeName,
                                                  String attrValue) throws AtlasException {
    typeName = ParamChecker.notEmpty(typeName, "delete candidate typeName");
    uniqueAttributeName = ParamChecker.notEmpty(uniqueAttributeName, "delete candidate unique attribute name");
    attrValue = ParamChecker.notEmpty(attrValue, "delete candidate unique attribute value");
    //Throws EntityNotFoundException if the entity could not be found by its unique attribute
    ITypedReferenceableInstance instance = getEntityDefinitionReference(typeName, uniqueAttributeName, attrValue);
    // Plain ArrayList instead of the previous double-brace initialization, which
    // created an anonymous inner class holding a reference to this service.
    List<String> deleteCandidateGuids = new ArrayList<>(1);
    deleteCandidateGuids.add(instance.getId()._getId());
    return deleteGuids(deleteCandidateGuids);
}
/**
 * Deletes the given guids through the repository and fans the resulting
 * created/updated/deleted guids out to entity-change listeners.
 */
private EntityResult deleteGuids(List<String> deleteCandidateGuids) throws AtlasException {
EntityResult entityResult = repository.deleteEntities(deleteCandidateGuids);
onEntitiesAddedUpdated(entityResult);
return entityResult;
}
/**
 * Notifies every registered entity-change listener of deleted entities.
 * Unlike add/update, the full typed instances are passed in directly (they can
 * no longer be loaded from the repository after deletion).
 */
private void onEntitiesDeleted(List<ITypedReferenceableInstance> entities) throws AtlasException {
for (EntityChangeListener listener : entityChangeListeners) {
// Second flag: not an import — TODO confirm against EntityChangeListener.
listener.onEntitiesDeleted(entities, false);
}
}
/**
 * Create or restore the {@link TypeSystem} cache on server activation.
 *
 * When an instance is passive, types could be created outside of its cache by the active instance.
 * Hence, when this instance becomes active, it needs to restore the cache from the backend store.
 * The first time initialization happens, the indices for these types also needs to be created.
 * This must happen only from the active instance, as it updates shared backend state.
 */
@Override
public void instanceIsActive() throws AtlasException {
LOG.info("Reacting to active state: restoring type system");
restoreTypeSystem();
}
/**
 * HA callback for transition to passive state — intentionally a no-op; the
 * type-system cache is only restored on activation (see instanceIsActive).
 */
@Override
public void instanceIsPassive() {
LOG.info("Reacting to passive state: no action right now");
}
/**
 * Reacts to TypeRegistry changes by fully rebuilding the in-memory type system
 * from the persistent type store.
 *
 * <p>Sequence: restore the persisted {@code TypesDef}, reset the live type
 * system, rebuild via a transient type system, then commit the resulting types.
 * Any {@link AtlasException} is surfaced as an internal-error
 * {@link AtlasBaseException}.
 *
 * @param changedTypeDefs change description; unused — the whole store is re-read
 */
@Override
public void onChange(ChangedTypeDefs changedTypeDefs) throws AtlasBaseException {
// All we need here is a restore of the type-system
LOG.info("TypeSystem reset invoked by TypeRegistry changes");
try {
TypesDef typesDef = typeStore.restore();
typeSystem.reset();
TypeSystem.TransientTypeSystem transientTypeSystem
= typeSystem.createTransientTypeSystem(typesDef, false);
Map<String, IDataType> typesAdded = transientTypeSystem.getTypesAdded();
LOG.info("Number of types got from transient type system: {}", typesAdded.size());
typeSystem.commitTypes(typesAdded);
} catch (AtlasException e) {
LOG.error("Failed to restore type-system after TypeRegistry changes", e);
throw new AtlasBaseException(AtlasErrorCode.INTERNAL_ERROR, e);
}
}
}
|
googleapis/google-cloud-java | 35,707 | java-contentwarehouse/proto-google-cloud-contentwarehouse-v1/src/main/java/com/google/cloud/contentwarehouse/v1/LockDocumentRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/contentwarehouse/v1/document_service_request.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.contentwarehouse.v1;
/**
*
*
* <pre>
* Request message for DocumentService.LockDocument.
* </pre>
*
* Protobuf type {@code google.cloud.contentwarehouse.v1.LockDocumentRequest}
*/
public final class LockDocumentRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.contentwarehouse.v1.LockDocumentRequest)
LockDocumentRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use LockDocumentRequest.newBuilder() to construct.
private LockDocumentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private LockDocumentRequest() {
name_ = "";
collectionId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new LockDocumentRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.contentwarehouse.v1.DocumentServiceRequestProto
.internal_static_google_cloud_contentwarehouse_v1_LockDocumentRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.contentwarehouse.v1.DocumentServiceRequestProto
.internal_static_google_cloud_contentwarehouse_v1_LockDocumentRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.contentwarehouse.v1.LockDocumentRequest.class,
com.google.cloud.contentwarehouse.v1.LockDocumentRequest.Builder.class);
}
private int bitField0_;
public static final int NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object name_ = "";
/**
*
*
* <pre>
* Required. The name of the document to lock.
* Format:
* projects/{project_number}/locations/{location}/documents/{document}.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The name.
*/
@java.lang.Override
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
name_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The name of the document to lock.
* Format:
* projects/{project_number}/locations/{location}/documents/{document}.
* </pre>
*
* <code>
* string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for name.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int COLLECTION_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object collectionId_ = "";
/**
*
*
* <pre>
* The collection the document connects to.
* </pre>
*
* <code>string collection_id = 2;</code>
*
* @return The collectionId.
*/
@java.lang.Override
public java.lang.String getCollectionId() {
java.lang.Object ref = collectionId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
collectionId_ = s;
return s;
}
}
/**
*
*
* <pre>
* The collection the document connects to.
* </pre>
*
* <code>string collection_id = 2;</code>
*
* @return The bytes for collectionId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getCollectionIdBytes() {
java.lang.Object ref = collectionId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
collectionId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int LOCKING_USER_FIELD_NUMBER = 3;
private com.google.cloud.contentwarehouse.v1.UserInfo lockingUser_;
/**
*
*
* <pre>
* The user information who locks the document.
* </pre>
*
* <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
*
* @return Whether the lockingUser field is set.
*/
@java.lang.Override
public boolean hasLockingUser() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* The user information who locks the document.
* </pre>
*
* <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
*
* @return The lockingUser.
*/
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.UserInfo getLockingUser() {
return lockingUser_ == null
? com.google.cloud.contentwarehouse.v1.UserInfo.getDefaultInstance()
: lockingUser_;
}
/**
*
*
* <pre>
* The user information who locks the document.
* </pre>
*
* <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
*/
@java.lang.Override
public com.google.cloud.contentwarehouse.v1.UserInfoOrBuilder getLockingUserOrBuilder() {
return lockingUser_ == null
? com.google.cloud.contentwarehouse.v1.UserInfo.getDefaultInstance()
: lockingUser_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(collectionId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, collectionId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getLockingUser());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(collectionId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, collectionId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getLockingUser());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.contentwarehouse.v1.LockDocumentRequest)) {
return super.equals(obj);
}
com.google.cloud.contentwarehouse.v1.LockDocumentRequest other =
(com.google.cloud.contentwarehouse.v1.LockDocumentRequest) obj;
if (!getName().equals(other.getName())) return false;
if (!getCollectionId().equals(other.getCollectionId())) return false;
if (hasLockingUser() != other.hasLockingUser()) return false;
if (hasLockingUser()) {
if (!getLockingUser().equals(other.getLockingUser())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NAME_FIELD_NUMBER;
hash = (53 * hash) + getName().hashCode();
hash = (37 * hash) + COLLECTION_ID_FIELD_NUMBER;
hash = (53 * hash) + getCollectionId().hashCode();
if (hasLockingUser()) {
hash = (37 * hash) + LOCKING_USER_FIELD_NUMBER;
hash = (53 * hash) + getLockingUser().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
  // --- Static parse entry points (protoc-generated) ---
  // Byte-oriented overloads delegate directly to PARSER and throw
  // InvalidProtocolBufferException on malformed input; stream-oriented
  // overloads route through GeneratedMessageV3 helpers so underlying
  // java.io.IOExceptions propagate unchanged.
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix, allowing multiple
  // messages to be read back-to-back from one stream.
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // --- Builder factory methods (protoc-generated) ---
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with a copy of {@code prototype}'s fields.
  public static Builder newBuilder(
      com.google.cloud.contentwarehouse.v1.LockDocumentRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance can hand out a fresh empty Builder; any other
    // instance must copy its field values into the new Builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Request message for DocumentService.LockDocument.
   * </pre>
   *
   * Protobuf type {@code google.cloud.contentwarehouse.v1.LockDocumentRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.contentwarehouse.v1.LockDocumentRequest)
      com.google.cloud.contentwarehouse.v1.LockDocumentRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.contentwarehouse.v1.DocumentServiceRequestProto
          .internal_static_google_cloud_contentwarehouse_v1_LockDocumentRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.contentwarehouse.v1.DocumentServiceRequestProto
          .internal_static_google_cloud_contentwarehouse_v1_LockDocumentRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.contentwarehouse.v1.LockDocumentRequest.class,
              com.google.cloud.contentwarehouse.v1.LockDocumentRequest.Builder.class);
    }
    // Construct using com.google.cloud.contentwarehouse.v1.LockDocumentRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // When field builders are always used (nested-builder mode), eagerly
      // create the locking_user sub-builder so parent change notifications work.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getLockingUserFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Resets all fields to proto defaults and clears the has-bits.
      super.clear();
      bitField0_ = 0;
      name_ = "";
      collectionId_ = "";
      lockingUser_ = null;
      if (lockingUserBuilder_ != null) {
        lockingUserBuilder_.dispose();
        lockingUserBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.contentwarehouse.v1.DocumentServiceRequestProto
          .internal_static_google_cloud_contentwarehouse_v1_LockDocumentRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.contentwarehouse.v1.LockDocumentRequest getDefaultInstanceForType() {
      return com.google.cloud.contentwarehouse.v1.LockDocumentRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.contentwarehouse.v1.LockDocumentRequest build() {
      com.google.cloud.contentwarehouse.v1.LockDocumentRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.contentwarehouse.v1.LockDocumentRequest buildPartial() {
      com.google.cloud.contentwarehouse.v1.LockDocumentRequest result =
          new com.google.cloud.contentwarehouse.v1.LockDocumentRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only explicitly-set fields (per bitField0_ mask) into the result:
    // 0x1 = name, 0x2 = collection_id, 0x4 = locking_user (the only field that
    // carries a has-bit on the built message).
    private void buildPartial0(com.google.cloud.contentwarehouse.v1.LockDocumentRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.collectionId_ = collectionId_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.lockingUser_ =
            lockingUserBuilder_ == null ? lockingUser_ : lockingUserBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.contentwarehouse.v1.LockDocumentRequest) {
        return mergeFrom((com.google.cloud.contentwarehouse.v1.LockDocumentRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Proto3 merge semantics: string fields are copied only when non-empty in
    // {@code other}; the locking_user message is recursively merged when set.
    public Builder mergeFrom(com.google.cloud.contentwarehouse.v1.LockDocumentRequest other) {
      if (other == com.google.cloud.contentwarehouse.v1.LockDocumentRequest.getDefaultInstance())
        return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getCollectionId().isEmpty()) {
        collectionId_ = other.collectionId_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasLockingUser()) {
        mergeLockingUser(other.getLockingUser());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this proto3 message.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      // Wire-format dispatch loop: tag 0 = end of input; tags 10/18/26 are
      // length-delimited encodings of field numbers 1, 2, 3 respectively.
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                collectionId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                input.readMessage(getLockingUserFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Has-bits for builder fields: 0x1 = name, 0x2 = collection_id,
    // 0x4 = locking_user.
    private int bitField0_;
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. The name of the document to lock.
     * Format:
     * projects/{project_number}/locations/{location}/documents/{document}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      // name_ may hold a raw ByteString after parsing; decode it lazily and
      // cache the decoded String for subsequent calls.
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the document to lock.
     * Format:
     * projects/{project_number}/locations/{location}/documents/{document}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      // Mirror of getName(): caches the UTF-8 ByteString form when the field
      // currently holds a String.
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the document to lock.
     * Format:
     * projects/{project_number}/locations/{location}/documents/{document}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the document to lock.
     * Format:
     * projects/{project_number}/locations/{location}/documents/{document}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The name of the document to lock.
     * Format:
     * projects/{project_number}/locations/{location}/documents/{document}.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private java.lang.Object collectionId_ = "";
    /**
     *
     *
     * <pre>
     * The collection the document connects to.
     * </pre>
     *
     * <code>string collection_id = 2;</code>
     *
     * @return The collectionId.
     */
    public java.lang.String getCollectionId() {
      // Same lazy String/ByteString caching pattern as getName().
      java.lang.Object ref = collectionId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        collectionId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The collection the document connects to.
     * </pre>
     *
     * <code>string collection_id = 2;</code>
     *
     * @return The bytes for collectionId.
     */
    public com.google.protobuf.ByteString getCollectionIdBytes() {
      java.lang.Object ref = collectionId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        collectionId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * The collection the document connects to.
     * </pre>
     *
     * <code>string collection_id = 2;</code>
     *
     * @param value The collectionId to set.
     * @return This builder for chaining.
     */
    public Builder setCollectionId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      collectionId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The collection the document connects to.
     * </pre>
     *
     * <code>string collection_id = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearCollectionId() {
      collectionId_ = getDefaultInstance().getCollectionId();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The collection the document connects to.
     * </pre>
     *
     * <code>string collection_id = 2;</code>
     *
     * @param value The bytes for collectionId to set.
     * @return This builder for chaining.
     */
    public Builder setCollectionIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      collectionId_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // locking_user is held either directly in lockingUser_ or, once a nested
    // builder has been requested, exclusively inside lockingUserBuilder_.
    private com.google.cloud.contentwarehouse.v1.UserInfo lockingUser_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.contentwarehouse.v1.UserInfo,
            com.google.cloud.contentwarehouse.v1.UserInfo.Builder,
            com.google.cloud.contentwarehouse.v1.UserInfoOrBuilder>
        lockingUserBuilder_;
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     *
     * @return Whether the lockingUser field is set.
     */
    public boolean hasLockingUser() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     *
     * @return The lockingUser.
     */
    public com.google.cloud.contentwarehouse.v1.UserInfo getLockingUser() {
      if (lockingUserBuilder_ == null) {
        return lockingUser_ == null
            ? com.google.cloud.contentwarehouse.v1.UserInfo.getDefaultInstance()
            : lockingUser_;
      } else {
        return lockingUserBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     */
    public Builder setLockingUser(com.google.cloud.contentwarehouse.v1.UserInfo value) {
      if (lockingUserBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        lockingUser_ = value;
      } else {
        lockingUserBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     */
    public Builder setLockingUser(
        com.google.cloud.contentwarehouse.v1.UserInfo.Builder builderForValue) {
      if (lockingUserBuilder_ == null) {
        lockingUser_ = builderForValue.build();
      } else {
        lockingUserBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     */
    public Builder mergeLockingUser(com.google.cloud.contentwarehouse.v1.UserInfo value) {
      // Field-merges into an existing non-default value; otherwise replaces
      // wholesale.
      if (lockingUserBuilder_ == null) {
        if (((bitField0_ & 0x00000004) != 0)
            && lockingUser_ != null
            && lockingUser_ != com.google.cloud.contentwarehouse.v1.UserInfo.getDefaultInstance()) {
          getLockingUserBuilder().mergeFrom(value);
        } else {
          lockingUser_ = value;
        }
      } else {
        lockingUserBuilder_.mergeFrom(value);
      }
      if (lockingUser_ != null) {
        bitField0_ |= 0x00000004;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     */
    public Builder clearLockingUser() {
      bitField0_ = (bitField0_ & ~0x00000004);
      lockingUser_ = null;
      if (lockingUserBuilder_ != null) {
        lockingUserBuilder_.dispose();
        lockingUserBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     */
    public com.google.cloud.contentwarehouse.v1.UserInfo.Builder getLockingUserBuilder() {
      bitField0_ |= 0x00000004;
      onChanged();
      return getLockingUserFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     */
    public com.google.cloud.contentwarehouse.v1.UserInfoOrBuilder getLockingUserOrBuilder() {
      if (lockingUserBuilder_ != null) {
        return lockingUserBuilder_.getMessageOrBuilder();
      } else {
        return lockingUser_ == null
            ? com.google.cloud.contentwarehouse.v1.UserInfo.getDefaultInstance()
            : lockingUser_;
      }
    }
    /**
     *
     *
     * <pre>
     * The user information who locks the document.
     * </pre>
     *
     * <code>.google.cloud.contentwarehouse.v1.UserInfo locking_user = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.contentwarehouse.v1.UserInfo,
            com.google.cloud.contentwarehouse.v1.UserInfo.Builder,
            com.google.cloud.contentwarehouse.v1.UserInfoOrBuilder>
        getLockingUserFieldBuilder() {
      // Lazily creates the sub-builder; ownership of the current value moves
      // into it, so the plain field is nulled afterwards.
      if (lockingUserBuilder_ == null) {
        lockingUserBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.contentwarehouse.v1.UserInfo,
                com.google.cloud.contentwarehouse.v1.UserInfo.Builder,
                com.google.cloud.contentwarehouse.v1.UserInfoOrBuilder>(
                getLockingUser(), getParentForChildren(), isClean());
        lockingUser_ = null;
      }
      return lockingUserBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.contentwarehouse.v1.LockDocumentRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.contentwarehouse.v1.LockDocumentRequest)
  // Singleton default (all-fields-empty) instance shared by all callers.
  private static final com.google.cloud.contentwarehouse.v1.LockDocumentRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.contentwarehouse.v1.LockDocumentRequest();
  }
  public static com.google.cloud.contentwarehouse.v1.LockDocumentRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser; parsePartialFrom attaches the partially-built message to any
  // thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<LockDocumentRequest> PARSER =
      new com.google.protobuf.AbstractParser<LockDocumentRequest>() {
        @java.lang.Override
        public LockDocumentRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<LockDocumentRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<LockDocumentRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.contentwarehouse.v1.LockDocumentRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleads/google-ads-java | 35,882 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/common/VideoBumperInStreamAdInfo.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/common/ad_type_infos.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.common;
/**
* <pre>
* Representation of video bumper in-stream ad format (very short in-stream
* non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.common.VideoBumperInStreamAdInfo}
*/
public final class VideoBumperInStreamAdInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.common.VideoBumperInStreamAdInfo)
VideoBumperInStreamAdInfoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use VideoBumperInStreamAdInfo.newBuilder() to construct.
  private VideoBumperInStreamAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Proto3 string fields default to the empty string, never null.
  private VideoBumperInStreamAdInfo() {
    actionButtonLabel_ = "";
    actionHeadline_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new VideoBumperInStreamAdInfo();
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoBumperInStreamAdInfo_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoBumperInStreamAdInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo.class, com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo.Builder.class);
  }
  // Has-bits for optional message fields: 0x1 = companion_banner.
  private int bitField0_;
  public static final int COMPANION_BANNER_FIELD_NUMBER = 3;
  private com.google.ads.googleads.v19.common.AdImageAsset companionBanner_;
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
   * @return Whether the companionBanner field is set.
   */
  @java.lang.Override
  public boolean hasCompanionBanner() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
   * @return The companionBanner.
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.common.AdImageAsset getCompanionBanner() {
    // Never returns null: falls back to the AdImageAsset default instance.
    return companionBanner_ == null ? com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
  }
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v19.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
    return companionBanner_ == null ? com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
  }
  public static final int ACTION_BUTTON_LABEL_FIELD_NUMBER = 4;
  // Holds either a decoded String or the raw ByteString from the wire; the
  // getters below convert lazily and cache the result (standard protoc idiom).
  @SuppressWarnings("serial")
  private volatile java.lang.Object actionButtonLabel_ = "";
  /**
   * <pre>
   * Label on the "Call To Action" button taking the user to the video ad's
   * final URL.
   * </pre>
   *
   * <code>string action_button_label = 4;</code>
   * @return The actionButtonLabel.
   */
  @java.lang.Override
  public java.lang.String getActionButtonLabel() {
    java.lang.Object ref = actionButtonLabel_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = 
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      actionButtonLabel_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Label on the "Call To Action" button taking the user to the video ad's
   * final URL.
   * </pre>
   *
   * <code>string action_button_label = 4;</code>
   * @return The bytes for actionButtonLabel.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getActionButtonLabelBytes() {
    java.lang.Object ref = actionButtonLabel_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b = 
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      actionButtonLabel_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ACTION_HEADLINE_FIELD_NUMBER = 5;
  @SuppressWarnings("serial")
  private volatile java.lang.Object actionHeadline_ = "";
  /**
   * <pre>
   * Additional text displayed with the CTA (call-to-action) button to give
   * context and encourage clicking on the button.
   * </pre>
   *
   * <code>string action_headline = 5;</code>
   * @return The actionHeadline.
   */
  @java.lang.Override
  public java.lang.String getActionHeadline() {
    // Same lazy String/ByteString caching pattern as getActionButtonLabel().
    java.lang.Object ref = actionHeadline_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = 
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      actionHeadline_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Additional text displayed with the CTA (call-to-action) button to give
   * context and encourage clicking on the button.
   * </pre>
   *
   * <code>string action_headline = 5;</code>
   * @return The bytes for actionHeadline.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getActionHeadlineBytes() {
    java.lang.Object ref = actionHeadline_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b = 
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      actionHeadline_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    // Fields are written in field-number order; defaults (unset message,
    // empty strings) are skipped entirely per proto3 rules.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getCompanionBanner());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, actionButtonLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, actionHeadline_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 means "not yet computed". Must mirror writeTo() exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, getCompanionBanner());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, actionButtonLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, actionHeadline_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Field-by-field structural equality, including unknown fields.
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo other = (com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo) obj;
    if (hasCompanionBanner() != other.hasCompanionBanner()) return false;
    if (hasCompanionBanner()) {
      if (!getCompanionBanner()
          .equals(other.getCompanionBanner())) return false;
    }
    if (!getActionButtonLabel()
        .equals(other.getActionButtonLabel())) return false;
    if (!getActionHeadline()
        .equals(other.getActionHeadline())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized (0 = not yet computed); folds field numbers and values so the
    // hash stays consistent with equals().
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasCompanionBanner()) {
      hash = (37 * hash) + COMPANION_BANNER_FIELD_NUMBER;
      hash = (53 * hash) + getCompanionBanner().hashCode();
    }
    hash = (37 * hash) + ACTION_BUTTON_LABEL_FIELD_NUMBER;
    hash = (53 * hash) + getActionButtonLabel().hashCode();
    hash = (37 * hash) + ACTION_HEADLINE_FIELD_NUMBER;
    hash = (53 * hash) + getActionHeadline().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parse entry points and builder factories (protoc-generated) ---
  // Byte-oriented overloads delegate to PARSER; stream-oriented overloads use
  // GeneratedMessageV3 helpers so IOExceptions propagate unchanged.
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix, allowing multiple
  // messages to be read back-to-back from one stream.
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with a copy of {@code prototype}'s fields.
  public static Builder newBuilder(com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // The default instance can hand out a fresh empty Builder; any other
    // instance must copy its field values into the new Builder.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Representation of video bumper in-stream ad format (very short in-stream
* non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.common.VideoBumperInStreamAdInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.common.VideoBumperInStreamAdInfo)
com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoBumperInStreamAdInfo_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoBumperInStreamAdInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo.class, com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo.Builder.class);
    }
    // Construct using com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // When field builders are always used (nested-builder mode), eagerly
      // create the companion_banner sub-builder so parent notifications work.
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
        getCompanionBannerFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Resets all fields to proto defaults and clears the has-bits.
      super.clear();
      bitField0_ = 0;
      companionBanner_ = null;
      if (companionBannerBuilder_ != null) {
        companionBannerBuilder_.dispose();
        companionBannerBuilder_ = null;
      }
      actionButtonLabel_ = "";
      actionHeadline_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v19.common.AdTypeInfosProto.internal_static_google_ads_googleads_v19_common_VideoBumperInStreamAdInfo_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo getDefaultInstanceForType() {
      return com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo build() {
      com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
@java.lang.Override
public com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo buildPartial() {
com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo result = new com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.companionBanner_ = companionBannerBuilder_ == null
? companionBanner_
: companionBannerBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.actionButtonLabel_ = actionButtonLabel_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.actionHeadline_ = actionHeadline_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo) {
return mergeFrom((com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo other) {
if (other == com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo.getDefaultInstance()) return this;
if (other.hasCompanionBanner()) {
mergeCompanionBanner(other.getCompanionBanner());
}
if (!other.getActionButtonLabel().isEmpty()) {
actionButtonLabel_ = other.actionButtonLabel_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getActionHeadline().isEmpty()) {
actionHeadline_ = other.actionHeadline_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 26: {
input.readMessage(
getCompanionBannerFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 26
case 34: {
actionButtonLabel_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 34
case 42: {
actionHeadline_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 42
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.ads.googleads.v19.common.AdImageAsset companionBanner_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.common.AdImageAsset, com.google.ads.googleads.v19.common.AdImageAsset.Builder, com.google.ads.googleads.v19.common.AdImageAssetOrBuilder> companionBannerBuilder_;
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
* @return Whether the companionBanner field is set.
*/
public boolean hasCompanionBanner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
* @return The companionBanner.
*/
public com.google.ads.googleads.v19.common.AdImageAsset getCompanionBanner() {
if (companionBannerBuilder_ == null) {
return companionBanner_ == null ? com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
} else {
return companionBannerBuilder_.getMessage();
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder setCompanionBanner(com.google.ads.googleads.v19.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
companionBanner_ = value;
} else {
companionBannerBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder setCompanionBanner(
com.google.ads.googleads.v19.common.AdImageAsset.Builder builderForValue) {
if (companionBannerBuilder_ == null) {
companionBanner_ = builderForValue.build();
} else {
companionBannerBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder mergeCompanionBanner(com.google.ads.googleads.v19.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
companionBanner_ != null &&
companionBanner_ != com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance()) {
getCompanionBannerBuilder().mergeFrom(value);
} else {
companionBanner_ = value;
}
} else {
companionBannerBuilder_.mergeFrom(value);
}
if (companionBanner_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder clearCompanionBanner() {
bitField0_ = (bitField0_ & ~0x00000001);
companionBanner_ = null;
if (companionBannerBuilder_ != null) {
companionBannerBuilder_.dispose();
companionBannerBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
*/
public com.google.ads.googleads.v19.common.AdImageAsset.Builder getCompanionBannerBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCompanionBannerFieldBuilder().getBuilder();
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
*/
public com.google.ads.googleads.v19.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
if (companionBannerBuilder_ != null) {
return companionBannerBuilder_.getMessageOrBuilder();
} else {
return companionBanner_ == null ?
com.google.ads.googleads.v19.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AdImageAsset companion_banner = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.common.AdImageAsset, com.google.ads.googleads.v19.common.AdImageAsset.Builder, com.google.ads.googleads.v19.common.AdImageAssetOrBuilder>
getCompanionBannerFieldBuilder() {
if (companionBannerBuilder_ == null) {
companionBannerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.common.AdImageAsset, com.google.ads.googleads.v19.common.AdImageAsset.Builder, com.google.ads.googleads.v19.common.AdImageAssetOrBuilder>(
getCompanionBanner(),
getParentForChildren(),
isClean());
companionBanner_ = null;
}
return companionBannerBuilder_;
}
private java.lang.Object actionButtonLabel_ = "";
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @return The actionButtonLabel.
*/
public java.lang.String getActionButtonLabel() {
java.lang.Object ref = actionButtonLabel_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionButtonLabel_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @return The bytes for actionButtonLabel.
*/
public com.google.protobuf.ByteString
getActionButtonLabelBytes() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionButtonLabel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @param value The actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabel(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @return This builder for chaining.
*/
public Builder clearActionButtonLabel() {
actionButtonLabel_ = getDefaultInstance().getActionButtonLabel();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @param value The bytes for actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabelBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object actionHeadline_ = "";
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @return The actionHeadline.
*/
public java.lang.String getActionHeadline() {
java.lang.Object ref = actionHeadline_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionHeadline_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @return The bytes for actionHeadline.
*/
public com.google.protobuf.ByteString
getActionHeadlineBytes() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionHeadline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @param value The actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadline(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @return This builder for chaining.
*/
public Builder clearActionHeadline() {
actionHeadline_ = getDefaultInstance().getActionHeadline();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @param value The bytes for actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadlineBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.common.VideoBumperInStreamAdInfo)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.common.VideoBumperInStreamAdInfo)
// Shared immutable singleton representing the all-defaults message.
private static final com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo();
}
public static com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser; on failure it attaches the partially-built message to the
// thrown InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<VideoBumperInStreamAdInfo>
    PARSER = new com.google.protobuf.AbstractParser<VideoBumperInStreamAdInfo>() {
  @java.lang.Override
  public VideoBumperInStreamAdInfo parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    Builder builder = newBuilder();
    try {
      builder.mergeFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(builder.buildPartial());
    } catch (com.google.protobuf.UninitializedMessageException e) {
      throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e)
          .setUnfinishedMessage(builder.buildPartial());
    }
    return builder.buildPartial();
  }
};
// Accessors for the shared parser and default instance (standard generated surface).
public static com.google.protobuf.Parser<VideoBumperInStreamAdInfo> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<VideoBumperInStreamAdInfo> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.common.VideoBumperInStreamAdInfo getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/common/ad_type_infos.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.common;
/**
* <pre>
* Representation of video bumper in-stream ad format (very short in-stream
* non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.common.VideoBumperInStreamAdInfo}
*/
public final class VideoBumperInStreamAdInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.common.VideoBumperInStreamAdInfo)
VideoBumperInStreamAdInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use VideoBumperInStreamAdInfo.newBuilder() to construct.
private VideoBumperInStreamAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used only for DEFAULT_INSTANCE; string fields default to "".
private VideoBumperInStreamAdInfo() {
  actionButtonLabel_ = "";
  actionHeadline_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new VideoBumperInStreamAdInfo();
}
// Descriptor plumbing linking this class to the generated AdTypeInfosProto tables.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoBumperInStreamAdInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoBumperInStreamAdInfo_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo.class, com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo.Builder.class);
}
// bit 0 of bitField0_ records explicit presence of companion_banner (field 3).
private int bitField0_;
public static final int COMPANION_BANNER_FIELD_NUMBER = 3;
private com.google.ads.googleads.v20.common.AdImageAsset companionBanner_;
/**
 * <pre>
 * The image assets of the companion banner used with the ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
 * @return Whether the companionBanner field is set.
 */
@java.lang.Override
public boolean hasCompanionBanner() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * The image assets of the companion banner used with the ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
 * @return The companionBanner. Never null; returns the default instance when unset.
 */
@java.lang.Override
public com.google.ads.googleads.v20.common.AdImageAsset getCompanionBanner() {
  return companionBanner_ == null ? com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
/**
 * <pre>
 * The image assets of the companion banner used with the ad.
 * </pre>
 *
 * <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v20.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
  return companionBanner_ == null ? com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
public static final int ACTION_BUTTON_LABEL_FIELD_NUMBER = 4;
// Lazily-decoded string: stored as ByteString until first String access, then cached.
@SuppressWarnings("serial")
private volatile java.lang.Object actionButtonLabel_ = "";
/**
 * <pre>
 * Label on the "Call To Action" button taking the user to the video ad's
 * final URL.
 * </pre>
 *
 * <code>string action_button_label = 4;</code>
 * @return The actionButtonLabel.
 */
@java.lang.Override
public java.lang.String getActionButtonLabel() {
  java.lang.Object ref = actionButtonLabel_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    actionButtonLabel_ = s;
    return s;
  }
}
/**
 * <pre>
 * Label on the "Call To Action" button taking the user to the video ad's
 * final URL.
 * </pre>
 *
 * <code>string action_button_label = 4;</code>
 * @return The bytes for actionButtonLabel.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getActionButtonLabelBytes() {
  java.lang.Object ref = actionButtonLabel_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    actionButtonLabel_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int ACTION_HEADLINE_FIELD_NUMBER = 5;
// Lazily-decoded string: stored as ByteString until first String access, then cached.
@SuppressWarnings("serial")
private volatile java.lang.Object actionHeadline_ = "";
/**
 * <pre>
 * Additional text displayed with the CTA (call-to-action) button to give
 * context and encourage clicking on the button.
 * </pre>
 *
 * <code>string action_headline = 5;</code>
 * @return The actionHeadline.
 */
@java.lang.Override
public java.lang.String getActionHeadline() {
  java.lang.Object ref = actionHeadline_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    actionHeadline_ = s;
    return s;
  }
}
/**
 * <pre>
 * Additional text displayed with the CTA (call-to-action) button to give
 * context and encourage clicking on the button.
 * </pre>
 *
 * <code>string action_headline = 5;</code>
 * @return The bytes for actionHeadline.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getActionHeadlineBytes() {
  java.lang.Object ref = actionHeadline_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    actionHeadline_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 unknown, 0 false, 1 true. Always true here (no required fields).
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes set fields in field-number order (3, 4, 5), then any unknown fields.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(3, getCompanionBanner());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 4, actionButtonLabel_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 5, actionHeadline_);
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the exact byte size writeTo will emit.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, getCompanionBanner());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, actionButtonLabel_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, actionHeadline_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-wise equality: companion_banner compared only when present in both,
// strings and unknown fields always compared.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo other = (com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo) obj;
  if (hasCompanionBanner() != other.hasCompanionBanner()) return false;
  if (hasCompanionBanner()) {
    if (!getCompanionBanner()
        .equals(other.getCompanionBanner())) return false;
  }
  if (!getActionButtonLabel()
      .equals(other.getActionButtonLabel())) return false;
  if (!getActionHeadline()
      .equals(other.getActionHeadline())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Memoized hash over descriptor, set fields (keyed by field number) and unknown fields;
// consistent with equals above.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasCompanionBanner()) {
    hash = (37 * hash) + COMPANION_BANNER_FIELD_NUMBER;
    hash = (53 * hash) + getCompanionBanner().hashCode();
  }
  hash = (37 * hash) + ACTION_BUTTON_LABEL_FIELD_NUMBER;
  hash = (53 * hash) + getActionButtonLabel().hashCode();
  hash = (37 * hash) + ACTION_HEADLINE_FIELD_NUMBER;
  hash = (53 * hash) + getActionHeadline().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Static parse entry points for every supported input source (ByteBuffer, ByteString,
// byte[], InputStream, CodedInputStream), each with and without an extension registry.
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Delimited variants read a length prefix first (see writeDelimitedTo).
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
// Builder factory methods — all builders derive from DEFAULT_INSTANCE.
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Returns a builder pre-populated with a copy of {@code prototype}'s fields.
public static Builder newBuilder(com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* <pre>
* Representation of video bumper in-stream ad format (very short in-stream
* non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.common.VideoBumperInStreamAdInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.common.VideoBumperInStreamAdInfo)
com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfoOrBuilder {
  // Descriptor plumbing for the Builder, mirroring the enclosing message class.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoBumperInStreamAdInfo_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoBumperInStreamAdInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo.class, com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo.Builder.class);
  }
  // Construct using com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // Eagerly creates nested-field builders when the runtime requires it.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getCompanionBannerFieldBuilder();
    }
  }
  // Resets every field and presence bit to its default state.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    companionBanner_ = null;
    if (companionBannerBuilder_ != null) {
      companionBannerBuilder_.dispose();
      companionBannerBuilder_ = null;
    }
    actionButtonLabel_ = "";
    actionHeadline_ = "";
    return this;
  }
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.common.AdTypeInfosProto.internal_static_google_ads_googleads_v20_common_VideoBumperInStreamAdInfo_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo getDefaultInstanceForType() {
return com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo build() {
com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo buildPartial() {
com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo result = new com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.companionBanner_ = companionBannerBuilder_ == null
? companionBanner_
: companionBannerBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.actionButtonLabel_ = actionButtonLabel_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.actionHeadline_ = actionHeadline_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo) {
return mergeFrom((com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo other) {
if (other == com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo.getDefaultInstance()) return this;
if (other.hasCompanionBanner()) {
mergeCompanionBanner(other.getCompanionBanner());
}
if (!other.getActionButtonLabel().isEmpty()) {
actionButtonLabel_ = other.actionButtonLabel_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getActionHeadline().isEmpty()) {
actionHeadline_ = other.actionHeadline_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 26: {
input.readMessage(
getCompanionBannerFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 26
case 34: {
actionButtonLabel_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 34
case 42: {
actionHeadline_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 42
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.ads.googleads.v20.common.AdImageAsset companionBanner_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AdImageAsset, com.google.ads.googleads.v20.common.AdImageAsset.Builder, com.google.ads.googleads.v20.common.AdImageAssetOrBuilder> companionBannerBuilder_;
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
* @return Whether the companionBanner field is set.
*/
public boolean hasCompanionBanner() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
* @return The companionBanner.
*/
public com.google.ads.googleads.v20.common.AdImageAsset getCompanionBanner() {
if (companionBannerBuilder_ == null) {
return companionBanner_ == null ? com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
} else {
return companionBannerBuilder_.getMessage();
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder setCompanionBanner(com.google.ads.googleads.v20.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
companionBanner_ = value;
} else {
companionBannerBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder setCompanionBanner(
com.google.ads.googleads.v20.common.AdImageAsset.Builder builderForValue) {
if (companionBannerBuilder_ == null) {
companionBanner_ = builderForValue.build();
} else {
companionBannerBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder mergeCompanionBanner(com.google.ads.googleads.v20.common.AdImageAsset value) {
if (companionBannerBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
companionBanner_ != null &&
companionBanner_ != com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance()) {
getCompanionBannerBuilder().mergeFrom(value);
} else {
companionBanner_ = value;
}
} else {
companionBannerBuilder_.mergeFrom(value);
}
if (companionBanner_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
*/
public Builder clearCompanionBanner() {
bitField0_ = (bitField0_ & ~0x00000001);
companionBanner_ = null;
if (companionBannerBuilder_ != null) {
companionBannerBuilder_.dispose();
companionBannerBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
*/
public com.google.ads.googleads.v20.common.AdImageAsset.Builder getCompanionBannerBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCompanionBannerFieldBuilder().getBuilder();
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
*/
public com.google.ads.googleads.v20.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
if (companionBannerBuilder_ != null) {
return companionBannerBuilder_.getMessageOrBuilder();
} else {
return companionBanner_ == null ?
com.google.ads.googleads.v20.common.AdImageAsset.getDefaultInstance() : companionBanner_;
}
}
/**
* <pre>
* The image assets of the companion banner used with the ad.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AdImageAsset companion_banner = 3;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AdImageAsset, com.google.ads.googleads.v20.common.AdImageAsset.Builder, com.google.ads.googleads.v20.common.AdImageAssetOrBuilder>
getCompanionBannerFieldBuilder() {
if (companionBannerBuilder_ == null) {
companionBannerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AdImageAsset, com.google.ads.googleads.v20.common.AdImageAsset.Builder, com.google.ads.googleads.v20.common.AdImageAssetOrBuilder>(
getCompanionBanner(),
getParentForChildren(),
isClean());
companionBanner_ = null;
}
return companionBannerBuilder_;
}
private java.lang.Object actionButtonLabel_ = "";
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @return The actionButtonLabel.
*/
public java.lang.String getActionButtonLabel() {
java.lang.Object ref = actionButtonLabel_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionButtonLabel_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @return The bytes for actionButtonLabel.
*/
public com.google.protobuf.ByteString
getActionButtonLabelBytes() {
java.lang.Object ref = actionButtonLabel_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionButtonLabel_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @param value The actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabel(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @return This builder for chaining.
*/
public Builder clearActionButtonLabel() {
actionButtonLabel_ = getDefaultInstance().getActionButtonLabel();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Label on the "Call To Action" button taking the user to the video ad's
* final URL.
* </pre>
*
* <code>string action_button_label = 4;</code>
* @param value The bytes for actionButtonLabel to set.
* @return This builder for chaining.
*/
public Builder setActionButtonLabelBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionButtonLabel_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object actionHeadline_ = "";
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @return The actionHeadline.
*/
public java.lang.String getActionHeadline() {
java.lang.Object ref = actionHeadline_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
actionHeadline_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @return The bytes for actionHeadline.
*/
public com.google.protobuf.ByteString
getActionHeadlineBytes() {
java.lang.Object ref = actionHeadline_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
actionHeadline_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @param value The actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadline(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @return This builder for chaining.
*/
public Builder clearActionHeadline() {
actionHeadline_ = getDefaultInstance().getActionHeadline();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
* <pre>
* Additional text displayed with the CTA (call-to-action) button to give
* context and encourage clicking on the button.
* </pre>
*
* <code>string action_headline = 5;</code>
* @param value The bytes for actionHeadline to set.
* @return This builder for chaining.
*/
public Builder setActionHeadlineBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
actionHeadline_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.common.VideoBumperInStreamAdInfo)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.common.VideoBumperInStreamAdInfo)
  // Shared immutable default instance; also the identity value for merges
  // and the factory for new builders (see newBuilder()).
  private static final com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo();
  }
  public static com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<VideoBumperInStreamAdInfo>
      PARSER = new com.google.protobuf.AbstractParser<VideoBumperInStreamAdInfo>() {
    @java.lang.Override
    public VideoBumperInStreamAdInfo parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      // Delegates wire parsing to Builder.mergeFrom; on failure the partially
      // populated message is attached so callers can inspect what was read.
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  // Static accessor for the singleton parser above.
  public static com.google.protobuf.Parser<VideoBumperInStreamAdInfo> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<VideoBumperInStreamAdInfo> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v20.common.VideoBumperInStreamAdInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/ads/googleads/v21/common/ad_type_infos.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.common;
/**
* <pre>
* Representation of video bumper in-stream ad format (very short in-stream
* non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.VideoBumperInStreamAdInfo}
*/
public final class VideoBumperInStreamAdInfo extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.common.VideoBumperInStreamAdInfo)
VideoBumperInStreamAdInfoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use VideoBumperInStreamAdInfo.newBuilder() to construct.
  private VideoBumperInStreamAdInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor: string fields default to empty; the companion_banner
  // message stays null until explicitly set.
  private VideoBumperInStreamAdInfo() {
    actionButtonLabel_ = "";
    actionHeadline_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new VideoBumperInStreamAdInfo();
  }
  // Descriptor and reflective field accessors for this message type,
  // looked up from the generated AdTypeInfosProto outer class.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoBumperInStreamAdInfo_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoBumperInStreamAdInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo.class, com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo.Builder.class);
  }
  // Presence bits; bit 0x1 tracks companion_banner (see hasCompanionBanner).
  private int bitField0_;
  public static final int COMPANION_BANNER_FIELD_NUMBER = 3;
  private com.google.ads.googleads.v21.common.AdImageAsset companionBanner_;
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
   * @return Whether the companionBanner field is set.
   */
  @java.lang.Override
  public boolean hasCompanionBanner() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
   * @return The companionBanner.
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.common.AdImageAsset getCompanionBanner() {
    // Never returns null: falls back to the AdImageAsset default instance.
    return companionBanner_ == null ? com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
  }
  /**
   * <pre>
   * The image assets of the companion banner used with the ad.
   * </pre>
   *
   * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
   */
  @java.lang.Override
  public com.google.ads.googleads.v21.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
    return companionBanner_ == null ? com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
  }
  public static final int ACTION_BUTTON_LABEL_FIELD_NUMBER = 4;
  @SuppressWarnings("serial")
  private volatile java.lang.Object actionButtonLabel_ = "";
  /**
   * <pre>
   * Label on the "Call To Action" button taking the user to the video ad's
   * final URL.
   * </pre>
   *
   * <code>string action_button_label = 4;</code>
   * @return The actionButtonLabel.
   */
  @java.lang.Override
  public java.lang.String getActionButtonLabel() {
    // The field holds either a String or a ByteString; decode lazily and
    // cache the String form (volatile write keeps this safe across threads).
    java.lang.Object ref = actionButtonLabel_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      actionButtonLabel_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Label on the "Call To Action" button taking the user to the video ad's
   * final URL.
   * </pre>
   *
   * <code>string action_button_label = 4;</code>
   * @return The bytes for actionButtonLabel.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getActionButtonLabelBytes() {
    java.lang.Object ref = actionButtonLabel_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      actionButtonLabel_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ACTION_HEADLINE_FIELD_NUMBER = 5;
  @SuppressWarnings("serial")
  private volatile java.lang.Object actionHeadline_ = "";
  /**
   * <pre>
   * Additional text displayed with the CTA (call-to-action) button to give
   * context and encourage clicking on the button.
   * </pre>
   *
   * <code>string action_headline = 5;</code>
   * @return The actionHeadline.
   */
  @java.lang.Override
  public java.lang.String getActionHeadline() {
    // Same lazy ByteString-to-String caching as getActionButtonLabel().
    java.lang.Object ref = actionHeadline_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      actionHeadline_ = s;
      return s;
    }
  }
  /**
   * <pre>
   * Additional text displayed with the CTA (call-to-action) button to give
   * context and encourage clicking on the button.
   * </pre>
   *
   * <code>string action_headline = 5;</code>
   * @return The bytes for actionHeadline.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString
      getActionHeadlineBytes() {
    java.lang.Object ref = actionHeadline_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8(
              (java.lang.String) ref);
      actionHeadline_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    // No required fields in this message, so initialization always succeeds.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // companion_banner (field 3) is written only when its presence bit is set;
    // the proto3 strings (fields 4, 5) are written only when non-empty.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(3, getCompanionBanner());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, actionButtonLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, actionHeadline_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Memoized; -1 means not yet computed. Must mirror writeTo's
    // field-emission conditions exactly.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, getCompanionBanner());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionButtonLabel_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, actionButtonLabel_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(actionHeadline_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, actionHeadline_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Field-wise equality: presence of companion_banner must match before its
    // value is compared; unknown fields also participate.
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo other = (com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo) obj;
    if (hasCompanionBanner() != other.hasCompanionBanner()) return false;
    if (hasCompanionBanner()) {
      if (!getCompanionBanner()
          .equals(other.getCompanionBanner())) return false;
    }
    if (!getActionButtonLabel()
        .equals(other.getActionButtonLabel())) return false;
    if (!getActionHeadline()
        .equals(other.getActionHeadline())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // Memoized (0 doubles as "not computed"); mixes field numbers with field
    // values, consistent with equals() above.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasCompanionBanner()) {
      hash = (37 * hash) + COMPANION_BANNER_FIELD_NUMBER;
      hash = (53 * hash) + getCompanionBanner().hashCode();
    }
    hash = (37 * hash) + ACTION_BUTTON_LABEL_FIELD_NUMBER;
    hash = (53 * hash) + getActionButtonLabel().hashCode();
    hash = (37 * hash) + ACTION_HEADLINE_FIELD_NUMBER;
    hash = (53 * hash) + getActionHeadline().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. The byte/ByteString/ByteBuffer
  // overloads delegate to PARSER directly; the stream overloads go through
  // GeneratedMessageV3 helpers that translate parse failures into
  // InvalidProtocolBufferException while propagating plain IOExceptions.
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  // Returns a builder pre-populated with the prototype's fields.
  public static Builder newBuilder(com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    // Never seed a builder from the shared default instance; start clean instead.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    // Creates a nested builder that notifies the given parent builder of changes.
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Representation of video bumper in-stream ad format (very short in-stream
* non-skippable video ad).
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.common.VideoBumperInStreamAdInfo}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.common.VideoBumperInStreamAdInfo)
com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoBumperInStreamAdInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoBumperInStreamAdInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo.class, com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo.Builder.class);
}
// Construct using com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getCompanionBannerFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
companionBanner_ = null;
if (companionBannerBuilder_ != null) {
companionBannerBuilder_.dispose();
companionBannerBuilder_ = null;
}
actionButtonLabel_ = "";
actionHeadline_ = "";
return this;
}
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v21.common.AdTypeInfosProto.internal_static_google_ads_googleads_v21_common_VideoBumperInStreamAdInfo_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo getDefaultInstanceForType() {
      return com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo.getDefaultInstance();
    }
    // Builds the message, throwing if it is not initialized (isInitialized()
    // is hard-wired to true for this message, so the throw is unreachable in
    // practice but kept for the generated-code contract).
    @java.lang.Override
    public com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo build() {
      com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    // Builds without the initialization check; field copying is skipped
    // entirely when no has-bit is set.
    @java.lang.Override
    public com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo buildPartial() {
      com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo result = new com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo(this);
      if (bitField0_ != 0) { buildPartial0(result); }
      onBuilt();
      return result;
    }
    // Copies set fields from the builder into result, translating builder
    // has-bits (from_bitField0_) into message has-bits (to_bitField0_).
    // Bit 0x1 = companion_banner, 0x2 = action_button_label, 0x4 = action_headline.
    private void buildPartial0(com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        // Prefer the nested builder's built message when one is active.
        result.companionBanner_ = companionBannerBuilder_ == null
            ? companionBanner_
            : companionBannerBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.actionButtonLabel_ = actionButtonLabel_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.actionHeadline_ = actionHeadline_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    // The overrides below only narrow the return type of the generic
    // reflective mutators to this Builder; all delegate to the superclass.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatch: use the typed merge when `other` is the same message type,
    // otherwise fall back to reflective field-by-field merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo) {
        return mergeFrom((com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: set/non-empty fields in `other` are merged into this
    // builder; unset fields leave this builder untouched. Merging the default
    // instance is a no-op. String fields are copied by reference (they are
    // immutable String/ByteString objects).
    public Builder mergeFrom(com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo other) {
      if (other == com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo.getDefaultInstance()) return this;
      if (other.hasCompanionBanner()) {
        mergeCompanionBanner(other.getCompanionBanner());
      }
      if (!other.getActionButtonLabel().isEmpty()) {
        actionButtonLabel_ = other.actionButtonLabel_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getActionHeadline().isEmpty()) {
        actionHeadline_ = other.actionHeadline_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields in this message, so it is always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop. Tag = (field number << 3) | wire type; all three
    // fields are length-delimited (wire type 2): 26 = companion_banner (3),
    // 34 = action_button_label (4), 42 = action_headline (5). Unknown tags are
    // preserved via parseUnknownField; tag 0 means end of stream.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 26: {
              // Parses directly into the nested field builder.
              input.readMessage(
                  getCompanionBannerFieldBuilder().getBuilder(),
                  extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 26
            case 34: {
              actionButtonLabel_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000002;
              break;
            } // case 34
            case 42: {
              actionHeadline_ = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              break;
            } // case 42
            default: {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parent builders even on failure so partial state is visible.
        onChanged();
      } // finally
      return this;
    }
    // Has-bits for this builder: 0x1 = companion_banner,
    // 0x2 = action_button_label, 0x4 = action_headline.
    private int bitField0_;
    // companion_banner is held either directly in companionBanner_ or behind a
    // lazily created single-field builder; when companionBannerBuilder_ is
    // non-null it is the authoritative source.
    private com.google.ads.googleads.v21.common.AdImageAsset companionBanner_;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.common.AdImageAsset, com.google.ads.googleads.v21.common.AdImageAsset.Builder, com.google.ads.googleads.v21.common.AdImageAssetOrBuilder> companionBannerBuilder_;
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     * @return Whether the companionBanner field is set.
     */
    public boolean hasCompanionBanner() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     * @return The companionBanner.
     */
    public com.google.ads.googleads.v21.common.AdImageAsset getCompanionBanner() {
      if (companionBannerBuilder_ == null) {
        // Never returns null: falls back to the default instance when unset.
        return companionBanner_ == null ? com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
      } else {
        return companionBannerBuilder_.getMessage();
      }
    }
    /**
     * Replaces the companion banner with {@code value} (must be non-null).
     *
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     */
    public Builder setCompanionBanner(com.google.ads.googleads.v21.common.AdImageAsset value) {
      if (companionBannerBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        companionBanner_ = value;
      } else {
        companionBannerBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Replaces the companion banner with the message built from
     * {@code builderForValue}.
     *
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     */
    public Builder setCompanionBanner(
        com.google.ads.googleads.v21.common.AdImageAsset.Builder builderForValue) {
      if (companionBannerBuilder_ == null) {
        companionBanner_ = builderForValue.build();
      } else {
        companionBannerBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     * Merges {@code value} into any existing non-default companion banner;
     * otherwise replaces the field outright.
     *
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     */
    public Builder mergeCompanionBanner(com.google.ads.googleads.v21.common.AdImageAsset value) {
      if (companionBannerBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0) &&
          companionBanner_ != null &&
          companionBanner_ != com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance()) {
          getCompanionBannerBuilder().mergeFrom(value);
        } else {
          companionBanner_ = value;
        }
      } else {
        companionBannerBuilder_.mergeFrom(value);
      }
      // Only mark the field set (and fire change notifications) when a value
      // is actually present after the merge.
      if (companionBanner_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     * Clears the companion banner and disposes any nested builder.
     *
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     */
    public Builder clearCompanionBanner() {
      bitField0_ = (bitField0_ & ~0x00000001);
      companionBanner_ = null;
      if (companionBannerBuilder_ != null) {
        companionBannerBuilder_.dispose();
        companionBannerBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     * Returns a mutable builder for the companion banner, marking the field
     * as set.
     *
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     */
    public com.google.ads.googleads.v21.common.AdImageAsset.Builder getCompanionBannerBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getCompanionBannerFieldBuilder().getBuilder();
    }
    /**
     * Read-only view of the companion banner without forcing builder creation.
     *
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     */
    public com.google.ads.googleads.v21.common.AdImageAssetOrBuilder getCompanionBannerOrBuilder() {
      if (companionBannerBuilder_ != null) {
        return companionBannerBuilder_.getMessageOrBuilder();
      } else {
        return companionBanner_ == null ?
            com.google.ads.googleads.v21.common.AdImageAsset.getDefaultInstance() : companionBanner_;
      }
    }
    /**
     * Lazily creates the single-field builder, seeding it with the current
     * value; ownership transfers to the builder (companionBanner_ is nulled).
     *
     * <pre>
     * The image assets of the companion banner used with the ad.
     * </pre>
     *
     * <code>.google.ads.googleads.v21.common.AdImageAsset companion_banner = 3;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.ads.googleads.v21.common.AdImageAsset, com.google.ads.googleads.v21.common.AdImageAsset.Builder, com.google.ads.googleads.v21.common.AdImageAssetOrBuilder>
        getCompanionBannerFieldBuilder() {
      if (companionBannerBuilder_ == null) {
        companionBannerBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.ads.googleads.v21.common.AdImageAsset, com.google.ads.googleads.v21.common.AdImageAsset.Builder, com.google.ads.googleads.v21.common.AdImageAssetOrBuilder>(
                getCompanionBanner(),
                getParentForChildren(),
                isClean());
        companionBanner_ = null;
      }
      return companionBannerBuilder_;
    }
    // Stored as either a String or a ByteString; getters lazily convert and
    // cache the String form (standard protobuf lazy UTF-8 decoding).
    private java.lang.Object actionButtonLabel_ = "";
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 4;</code>
     * @return The actionButtonLabel.
     */
    public java.lang.String getActionButtonLabel() {
      java.lang.Object ref = actionButtonLabel_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded form for subsequent calls.
        actionButtonLabel_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 4;</code>
     * @return The bytes for actionButtonLabel.
     */
    public com.google.protobuf.ByteString
        getActionButtonLabelBytes() {
      java.lang.Object ref = actionButtonLabel_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded form for subsequent calls.
        actionButtonLabel_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 4;</code>
     * @param value The actionButtonLabel to set (must be non-null).
     * @return This builder for chaining.
     */
    public Builder setActionButtonLabel(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      actionButtonLabel_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * Resets the field to the default instance's (empty) value and clears its
     * has-bit.
     *
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 4;</code>
     * @return This builder for chaining.
     */
    public Builder clearActionButtonLabel() {
      actionButtonLabel_ = getDefaultInstance().getActionButtonLabel();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Label on the "Call To Action" button taking the user to the video ad's
     * final URL.
     * </pre>
     *
     * <code>string action_button_label = 4;</code>
     * @param value The bytes for actionButtonLabel to set; must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setActionButtonLabelBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      actionButtonLabel_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Same String/ByteString dual representation as actionButtonLabel_.
    private java.lang.Object actionHeadline_ = "";
    /**
     * <pre>
     * Additional text displayed with the CTA (call-to-action) button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 5;</code>
     * @return The actionHeadline.
     */
    public java.lang.String getActionHeadline() {
      java.lang.Object ref = actionHeadline_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded form for subsequent calls.
        actionHeadline_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Additional text displayed with the CTA (call-to-action) button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 5;</code>
     * @return The bytes for actionHeadline.
     */
    public com.google.protobuf.ByteString
        getActionHeadlineBytes() {
      java.lang.Object ref = actionHeadline_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded form for subsequent calls.
        actionHeadline_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Additional text displayed with the CTA (call-to-action) button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 5;</code>
     * @param value The actionHeadline to set (must be non-null).
     * @return This builder for chaining.
     */
    public Builder setActionHeadline(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      actionHeadline_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     * Resets the field to the default instance's (empty) value and clears its
     * has-bit.
     *
     * <pre>
     * Additional text displayed with the CTA (call-to-action) button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 5;</code>
     * @return This builder for chaining.
     */
    public Builder clearActionHeadline() {
      actionHeadline_ = getDefaultInstance().getActionHeadline();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Additional text displayed with the CTA (call-to-action) button to give
     * context and encourage clicking on the button.
     * </pre>
     *
     * <code>string action_headline = 5;</code>
     * @param value The bytes for actionHeadline to set; must be valid UTF-8.
     * @return This builder for chaining.
     */
    public Builder setActionHeadlineBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      checkByteStringIsUtf8(value);
      actionHeadline_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    // Final overrides so the unknown-field setters return this Builder type;
    // both delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.common.VideoBumperInStreamAdInfo)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.common.VideoBumperInStreamAdInfo)
  // Singleton default instance; also the identity value for mergeFrom.
  private static final com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo();
  }
  public static com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser that delegates to Builder.mergeFrom; on any failure the partially
  // parsed message is attached via setUnfinishedMessage so callers can
  // inspect what was read before the error.
  private static final com.google.protobuf.Parser<VideoBumperInStreamAdInfo>
      PARSER = new com.google.protobuf.AbstractParser<VideoBumperInStreamAdInfo>() {
    @java.lang.Override
    public VideoBumperInStreamAdInfo parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O errors in the protobuf exception type.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<VideoBumperInStreamAdInfo> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<VideoBumperInStreamAdInfo> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.common.VideoBumperInStreamAdInfo getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/run/v2/worker_pool.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.run.v2;
/**
*
*
* <pre>
* Response message containing a list of WorkerPools.
* </pre>
*
* Protobuf type {@code google.cloud.run.v2.ListWorkerPoolsResponse}
*/
public final class ListWorkerPoolsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.run.v2.ListWorkerPoolsResponse)
ListWorkerPoolsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListWorkerPoolsResponse.newBuilder() to construct.
  private ListWorkerPoolsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor initializes fields to proto3 defaults (empty list,
  // empty string).
  private ListWorkerPoolsResponse() {
    workerPools_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  // Used by the protobuf runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListWorkerPoolsResponse();
  }
  // Descriptor and field-accessor plumbing for reflective access.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.run.v2.WorkerPoolProto
        .internal_static_google_cloud_run_v2_ListWorkerPoolsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.run.v2.WorkerPoolProto
        .internal_static_google_cloud_run_v2_ListWorkerPoolsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.run.v2.ListWorkerPoolsResponse.class,
            com.google.cloud.run.v2.ListWorkerPoolsResponse.Builder.class);
  }
  public static final int WORKER_POOLS_FIELD_NUMBER = 1;

  // Immutable once built; the builder hands over an unmodifiable list in
  // buildPartialRepeatedFields.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.run.v2.WorkerPool> workerPools_;

  /**
   *
   *
   * <pre>
   * The resulting list of WorkerPools.
   * </pre>
   *
   * <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.run.v2.WorkerPool> getWorkerPoolsList() {
    return workerPools_;
  }

  /**
   *
   *
   * <pre>
   * The resulting list of WorkerPools.
   * </pre>
   *
   * <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.run.v2.WorkerPoolOrBuilder>
      getWorkerPoolsOrBuilderList() {
    return workerPools_;
  }

  /**
   *
   *
   * <pre>
   * The resulting list of WorkerPools.
   * </pre>
   *
   * <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
   */
  @java.lang.Override
  public int getWorkerPoolsCount() {
    return workerPools_.size();
  }

  /**
   *
   *
   * <pre>
   * The resulting list of WorkerPools.
   * </pre>
   *
   * <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.run.v2.WorkerPool getWorkerPools(int index) {
    return workerPools_.get(index);
  }

  /**
   *
   *
   * <pre>
   * The resulting list of WorkerPools.
   * </pre>
   *
   * <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.run.v2.WorkerPoolOrBuilder getWorkerPoolsOrBuilder(int index) {
    return workerPools_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

  // Dual String/ByteString representation with lazy UTF-8 conversion; volatile
  // so the cached conversion is safely published across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";

  /**
   *
   *
   * <pre>
   * A token indicating there are more items than page_size. Use it in the next
   * ListWorkerPools request to continue.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded form for subsequent calls.
      nextPageToken_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * A token indicating there are more items than page_size. Use it in the next
   * ListWorkerPools request to continue.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded form for subsequent calls.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes fields in field-number order, then any unknown fields.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < workerPools_.size(); i++) {
      output.writeMessage(1, workerPools_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }

  // Computes and memoizes the serialized byte size (memoizedSize field is
  // inherited from the protobuf base class; -1 means not yet computed).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    for (int i = 0; i < workerPools_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, workerPools_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Value equality over both fields plus unknown fields; non-message types
  // fall back to Object.equals via super.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.run.v2.ListWorkerPoolsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.run.v2.ListWorkerPoolsResponse other =
        (com.google.cloud.run.v2.ListWorkerPoolsResponse) obj;

    if (!getWorkerPoolsList().equals(other.getWorkerPoolsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Memoized hash consistent with equals(); mixes the descriptor, each set
  // field tagged by its field number, and the unknown fields.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getWorkerPoolsCount() > 0) {
      hash = (37 * hash) + WORKER_POOLS_FIELD_NUMBER;
      hash = (53 * hash) + getWorkerPoolsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: each overload delegates to PARSER
  // (with or without an extension registry) for the given input kind.
  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.run.v2.ListWorkerPoolsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the fields of `prototype`.
  public static Builder newBuilder(com.google.cloud.run.v2.ListWorkerPoolsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // The default instance yields a fresh empty builder; any other instance
  // yields a builder seeded with its fields.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Response message containing a list of WorkerPools.
* </pre>
*
* Protobuf type {@code google.cloud.run.v2.ListWorkerPoolsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.run.v2.ListWorkerPoolsResponse)
com.google.cloud.run.v2.ListWorkerPoolsResponseOrBuilder {
    // Descriptor and field-accessor plumbing for reflective builder access.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.run.v2.WorkerPoolProto
          .internal_static_google_cloud_run_v2_ListWorkerPoolsResponse_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.run.v2.WorkerPoolProto
          .internal_static_google_cloud_run_v2_ListWorkerPoolsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.run.v2.ListWorkerPoolsResponse.class,
              com.google.cloud.run.v2.ListWorkerPoolsResponse.Builder.class);
    }

    // Construct using com.google.cloud.run.v2.ListWorkerPoolsResponse.newBuilder()
    private Builder() {}

    // Parent-aware constructor used when nested inside another builder.
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets every field: the repeated worker_pools list reverts to the shared
    // empty list (or the repeated-field builder is cleared), and the
    // page token returns to "".
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (workerPoolsBuilder_ == null) {
        workerPools_ = java.util.Collections.emptyList();
      } else {
        workerPools_ = null;
        workerPoolsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.run.v2.WorkerPoolProto
          .internal_static_google_cloud_run_v2_ListWorkerPoolsResponse_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.run.v2.ListWorkerPoolsResponse getDefaultInstanceForType() {
      return com.google.cloud.run.v2.ListWorkerPoolsResponse.getDefaultInstance();
    }
    // Builds the message, throwing if uninitialized (isInitialized() is
    // hard-wired to true here, so the throw is effectively unreachable).
    @java.lang.Override
    public com.google.cloud.run.v2.ListWorkerPoolsResponse build() {
      com.google.cloud.run.v2.ListWorkerPoolsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.run.v2.ListWorkerPoolsResponse buildPartial() {
      com.google.cloud.run.v2.ListWorkerPoolsResponse result =
          new com.google.cloud.run.v2.ListWorkerPoolsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Hands the worker_pools list to the message. When the builder owns a
    // mutable copy (bit 0x1 set), it is frozen as unmodifiable and the bit is
    // cleared so a later mutation forces a fresh copy (copy-on-write).
    private void buildPartialRepeatedFields(
        com.google.cloud.run.v2.ListWorkerPoolsResponse result) {
      if (workerPoolsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          workerPools_ = java.util.Collections.unmodifiableList(workerPools_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.workerPools_ = workerPools_;
      } else {
        result.workerPools_ = workerPoolsBuilder_.build();
      }
    }

    // Copies scalar fields with set has-bits (0x2 = next_page_token).
    private void buildPartial0(com.google.cloud.run.v2.ListWorkerPoolsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    // The overrides below only narrow the return type of the generic
    // reflective mutators to this Builder; all delegate to the superclass.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatch: use the typed merge for the same message type, otherwise fall
    // back to reflective merging.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.run.v2.ListWorkerPoolsResponse) {
        return mergeFrom((com.google.cloud.run.v2.ListWorkerPoolsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge. For worker_pools: if this builder's list is empty it adopts
    // other's (immutable) list by reference and clears the ownership bit;
    // otherwise it appends after making its copy mutable. The repeated-field
    // builder path mirrors that logic, re-wrapping the adopted list in a new
    // field builder when needed. Merging the default instance is a no-op.
    public Builder mergeFrom(com.google.cloud.run.v2.ListWorkerPoolsResponse other) {
      if (other == com.google.cloud.run.v2.ListWorkerPoolsResponse.getDefaultInstance())
        return this;
      if (workerPoolsBuilder_ == null) {
        if (!other.workerPools_.isEmpty()) {
          if (workerPools_.isEmpty()) {
            workerPools_ = other.workerPools_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureWorkerPoolsIsMutable();
            workerPools_.addAll(other.workerPools_);
          }
          onChanged();
        }
      } else {
        if (!other.workerPools_.isEmpty()) {
          if (workerPoolsBuilder_.isEmpty()) {
            workerPoolsBuilder_.dispose();
            workerPoolsBuilder_ = null;
            workerPools_ = other.workerPools_;
            bitField0_ = (bitField0_ & ~0x00000001);
            workerPoolsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getWorkerPoolsFieldBuilder()
                    : null;
          } else {
            workerPoolsBuilder_.addAllMessages(other.workerPools_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    // No required fields in this message, so always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.run.v2.WorkerPool m =
input.readMessage(
com.google.cloud.run.v2.WorkerPool.parser(), extensionRegistry);
if (workerPoolsBuilder_ == null) {
ensureWorkerPoolsIsMutable();
workerPools_.add(m);
} else {
workerPoolsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
    private int bitField0_;

    // Backing list for worker_pools. Bit 0x1 of bitField0_ records whether the
    // list is privately owned (mutable); when clear, the list may be shared
    // with another message and must be copied before mutation.
    private java.util.List<com.google.cloud.run.v2.WorkerPool> workerPools_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: promote workerPools_ to a private ArrayList the
    // first time it is mutated.
    private void ensureWorkerPoolsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        workerPools_ = new java.util.ArrayList<com.google.cloud.run.v2.WorkerPool>(workerPools_);
        bitField0_ |= 0x00000001;
      }
    }

    // Lazily-created nested builder for worker_pools; while non-null it — not
    // workerPools_ — is the source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.run.v2.WorkerPool,
            com.google.cloud.run.v2.WorkerPool.Builder,
            com.google.cloud.run.v2.WorkerPoolOrBuilder>
        workerPoolsBuilder_;
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public java.util.List<com.google.cloud.run.v2.WorkerPool> getWorkerPoolsList() {
if (workerPoolsBuilder_ == null) {
return java.util.Collections.unmodifiableList(workerPools_);
} else {
return workerPoolsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public int getWorkerPoolsCount() {
if (workerPoolsBuilder_ == null) {
return workerPools_.size();
} else {
return workerPoolsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public com.google.cloud.run.v2.WorkerPool getWorkerPools(int index) {
if (workerPoolsBuilder_ == null) {
return workerPools_.get(index);
} else {
return workerPoolsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder setWorkerPools(int index, com.google.cloud.run.v2.WorkerPool value) {
if (workerPoolsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWorkerPoolsIsMutable();
workerPools_.set(index, value);
onChanged();
} else {
workerPoolsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder setWorkerPools(
int index, com.google.cloud.run.v2.WorkerPool.Builder builderForValue) {
if (workerPoolsBuilder_ == null) {
ensureWorkerPoolsIsMutable();
workerPools_.set(index, builderForValue.build());
onChanged();
} else {
workerPoolsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder addWorkerPools(com.google.cloud.run.v2.WorkerPool value) {
if (workerPoolsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWorkerPoolsIsMutable();
workerPools_.add(value);
onChanged();
} else {
workerPoolsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder addWorkerPools(int index, com.google.cloud.run.v2.WorkerPool value) {
if (workerPoolsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWorkerPoolsIsMutable();
workerPools_.add(index, value);
onChanged();
} else {
workerPoolsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder addWorkerPools(com.google.cloud.run.v2.WorkerPool.Builder builderForValue) {
if (workerPoolsBuilder_ == null) {
ensureWorkerPoolsIsMutable();
workerPools_.add(builderForValue.build());
onChanged();
} else {
workerPoolsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder addWorkerPools(
int index, com.google.cloud.run.v2.WorkerPool.Builder builderForValue) {
if (workerPoolsBuilder_ == null) {
ensureWorkerPoolsIsMutable();
workerPools_.add(index, builderForValue.build());
onChanged();
} else {
workerPoolsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder addAllWorkerPools(
java.lang.Iterable<? extends com.google.cloud.run.v2.WorkerPool> values) {
if (workerPoolsBuilder_ == null) {
ensureWorkerPoolsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, workerPools_);
onChanged();
} else {
workerPoolsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder clearWorkerPools() {
if (workerPoolsBuilder_ == null) {
workerPools_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
workerPoolsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public Builder removeWorkerPools(int index) {
if (workerPoolsBuilder_ == null) {
ensureWorkerPoolsIsMutable();
workerPools_.remove(index);
onChanged();
} else {
workerPoolsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public com.google.cloud.run.v2.WorkerPool.Builder getWorkerPoolsBuilder(int index) {
return getWorkerPoolsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public com.google.cloud.run.v2.WorkerPoolOrBuilder getWorkerPoolsOrBuilder(int index) {
if (workerPoolsBuilder_ == null) {
return workerPools_.get(index);
} else {
return workerPoolsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public java.util.List<? extends com.google.cloud.run.v2.WorkerPoolOrBuilder>
getWorkerPoolsOrBuilderList() {
if (workerPoolsBuilder_ != null) {
return workerPoolsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(workerPools_);
}
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public com.google.cloud.run.v2.WorkerPool.Builder addWorkerPoolsBuilder() {
return getWorkerPoolsFieldBuilder()
.addBuilder(com.google.cloud.run.v2.WorkerPool.getDefaultInstance());
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public com.google.cloud.run.v2.WorkerPool.Builder addWorkerPoolsBuilder(int index) {
return getWorkerPoolsFieldBuilder()
.addBuilder(index, com.google.cloud.run.v2.WorkerPool.getDefaultInstance());
}
/**
*
*
* <pre>
* The resulting list of WorkerPools.
* </pre>
*
* <code>repeated .google.cloud.run.v2.WorkerPool worker_pools = 1;</code>
*/
public java.util.List<com.google.cloud.run.v2.WorkerPool.Builder> getWorkerPoolsBuilderList() {
return getWorkerPoolsFieldBuilder().getBuilderList();
}
    // Creates the RepeatedFieldBuilderV3 on first use, handing it ownership of
    // the current list (workerPools_ is nulled so that all access goes through
    // the builder from then on).
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.run.v2.WorkerPool,
            com.google.cloud.run.v2.WorkerPool.Builder,
            com.google.cloud.run.v2.WorkerPoolOrBuilder>
        getWorkerPoolsFieldBuilder() {
      if (workerPoolsBuilder_ == null) {
        workerPoolsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.run.v2.WorkerPool,
                com.google.cloud.run.v2.WorkerPool.Builder,
                com.google.cloud.run.v2.WorkerPoolOrBuilder>(
                workerPools_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        workerPools_ = null;
      }
      return workerPoolsBuilder_;
    }
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token indicating there are more items than page_size. Use it in the next
* ListWorkerPools request to continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token indicating there are more items than page_size. Use it in the next
* ListWorkerPools request to continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token indicating there are more items than page_size. Use it in the next
* ListWorkerPools request to continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token indicating there are more items than page_size. Use it in the next
* ListWorkerPools request to continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token indicating there are more items than page_size. Use it in the next
* ListWorkerPools request to continue.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Unknown-field handling delegates to the base builder implementation.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.run.v2.ListWorkerPoolsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.run.v2.ListWorkerPoolsResponse)
private static final com.google.cloud.run.v2.ListWorkerPoolsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.run.v2.ListWorkerPoolsResponse();
}
public static com.google.cloud.run.v2.ListWorkerPoolsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
  // Stateless parser shared by all parse entry points. Any failure surfaces as
  // an InvalidProtocolBufferException carrying the partially-parsed message.
  private static final com.google.protobuf.Parser<ListWorkerPoolsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListWorkerPoolsResponse>() {
        @java.lang.Override
        public ListWorkerPoolsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach whatever was parsed so far so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain IO failures in the protobuf exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<ListWorkerPoolsResponse> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<ListWorkerPoolsResponse> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.run.v2.ListWorkerPoolsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1alpha1/datastream_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1alpha1;
/**
*
*
* <pre>
* Oracle schema.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.OracleSchema}
*/
public final class OracleSchema extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1alpha1.OracleSchema)
OracleSchemaOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use OracleSchema.newBuilder() to construct.
  private OracleSchema(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Used only to create the default instance; fields start at proto3 defaults
  // (empty string / empty list).
  private OracleSchema() {
    schemaName_ = "";
    oracleTables_ = java.util.Collections.emptyList();
  }

  // Hook used by the protobuf runtime to allocate fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new OracleSchema();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
        .internal_static_google_cloud_datastream_v1alpha1_OracleSchema_descriptor;
  }

  // Maps descriptor fields to the generated accessors for reflective access.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
        .internal_static_google_cloud_datastream_v1alpha1_OracleSchema_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.datastream.v1alpha1.OracleSchema.class,
            com.google.cloud.datastream.v1alpha1.OracleSchema.Builder.class);
  }
public static final int SCHEMA_NAME_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object schemaName_ = "";
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema_name = 1;</code>
*
* @return The schemaName.
*/
@java.lang.Override
public java.lang.String getSchemaName() {
java.lang.Object ref = schemaName_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
schemaName_ = s;
return s;
}
}
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema_name = 1;</code>
*
* @return The bytes for schemaName.
*/
@java.lang.Override
public com.google.protobuf.ByteString getSchemaNameBytes() {
java.lang.Object ref = schemaName_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
schemaName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  public static final int ORACLE_TABLES_FIELD_NUMBER = 2;

  // Immutable list snapshot assigned by the builder; never null.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.datastream.v1alpha1.OracleTable> oracleTables_;

  /**
   * Tables in the schema.
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.datastream.v1alpha1.OracleTable> getOracleTablesList() {
    return oracleTables_;
  }

  /**
   * Tables in the schema, as a message-or-builder view (identical list, since
   * a built message is immutable).
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.datastream.v1alpha1.OracleTableOrBuilder>
      getOracleTablesOrBuilderList() {
    return oracleTables_;
  }

  /**
   * Number of tables in the schema.
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
   */
  @java.lang.Override
  public int getOracleTablesCount() {
    return oracleTables_.size();
  }

  /**
   * Table at {@code index}.
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.OracleTable getOracleTables(int index) {
    return oracleTables_.get(index);
  }

  /**
   * Table at {@code index}, as an or-builder view.
   *
   * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.OracleTableOrBuilder getOracleTablesOrBuilder(
      int index) {
    return oracleTables_.get(index);
  }

  // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  // Proto3 message: no required fields, so always initialized (cached).
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(schemaName_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, schemaName_);
}
for (int i = 0; i < oracleTables_.size(); i++) {
output.writeMessage(2, oracleTables_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(schemaName_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, schemaName_);
}
for (int i = 0; i < oracleTables_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, oracleTables_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.datastream.v1alpha1.OracleSchema)) {
return super.equals(obj);
}
com.google.cloud.datastream.v1alpha1.OracleSchema other =
(com.google.cloud.datastream.v1alpha1.OracleSchema) obj;
if (!getSchemaName().equals(other.getSchemaName())) return false;
if (!getOracleTablesList().equals(other.getOracleTablesList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Hash mixes descriptor identity, each populated field keyed by field
  // number, and unknown fields. Memoized in memoizedHashCode (0 means "not
  // yet computed"; a computed 0 would simply be recomputed on each call).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + SCHEMA_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getSchemaName().hashCode();
    if (getOracleTablesCount() > 0) {
      hash = (37 * hash) + ORACLE_TABLES_FIELD_NUMBER;
      hash = (53 * hash) + getOracleTablesList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard protobuf parse entry points; all delegate to PARSER or the
  // GeneratedMessageV3 IO helpers.
  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message body.
  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.datastream.v1alpha1.OracleSchema parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.datastream.v1alpha1.OracleSchema prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Oracle schema.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1alpha1.OracleSchema}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1alpha1.OracleSchema)
com.google.cloud.datastream.v1alpha1.OracleSchemaOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1alpha1_OracleSchema_descriptor;
    }

    // Maps descriptor fields to generated accessors for reflective access.
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1alpha1_OracleSchema_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.datastream.v1alpha1.OracleSchema.class,
              com.google.cloud.datastream.v1alpha1.OracleSchema.Builder.class);
    }

    // Construct using com.google.cloud.datastream.v1alpha1.OracleSchema.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its default. bitField0_ is zeroed up front, so the
    // trailing "& ~0x00000002" is redundant but harmless (generator artifact).
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      schemaName_ = "";
      if (oracleTablesBuilder_ == null) {
        oracleTables_ = java.util.Collections.emptyList();
      } else {
        oracleTables_ = null;
        oracleTablesBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.datastream.v1alpha1.CloudDatastreamResourcesProto
          .internal_static_google_cloud_datastream_v1alpha1_OracleSchema_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.datastream.v1alpha1.OracleSchema getDefaultInstanceForType() {
      return com.google.cloud.datastream.v1alpha1.OracleSchema.getDefaultInstance();
    }
    // Builds and validates; proto3 has no required fields, so in practice this
    // never throws.
    @java.lang.Override
    public com.google.cloud.datastream.v1alpha1.OracleSchema build() {
      com.google.cloud.datastream.v1alpha1.OracleSchema result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.datastream.v1alpha1.OracleSchema buildPartial() {
      com.google.cloud.datastream.v1alpha1.OracleSchema result =
          new com.google.cloud.datastream.v1alpha1.OracleSchema(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Hands the repeated field to the message: freezes the plain list (bit 0x2
    // cleared marks it as shared) or snapshots the nested builder.
    private void buildPartialRepeatedFields(
        com.google.cloud.datastream.v1alpha1.OracleSchema result) {
      if (oracleTablesBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          oracleTables_ = java.util.Collections.unmodifiableList(oracleTables_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.oracleTables_ = oracleTables_;
      } else {
        result.oracleTables_ = oracleTablesBuilder_.build();
      }
    }

    // Copies singular fields that were explicitly set (bit 0x1 = schema_name).
    private void buildPartial0(com.google.cloud.datastream.v1alpha1.OracleSchema result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.schemaName_ = schemaName_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    // Reflective (descriptor-based) mutators delegate to the base builder so
    // they stay consistent with the typed accessors below.
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datastream.v1alpha1.OracleSchema) {
return mergeFrom((com.google.cloud.datastream.v1alpha1.OracleSchema) other);
} else {
super.mergeFrom(other);
return this;
}
}
    /**
     * Merges {@code other} into this builder: a non-empty schema_name
     * overwrites ours (proto3 merge semantics) and oracle_tables entries are
     * appended.
     */
    public Builder mergeFrom(com.google.cloud.datastream.v1alpha1.OracleSchema other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.datastream.v1alpha1.OracleSchema.getDefaultInstance())
        return this;
      if (!other.getSchemaName().isEmpty()) {
        schemaName_ = other.schemaName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (oracleTablesBuilder_ == null) {
        // Plain-list mode: adopt the other message's list wholesale when ours
        // is empty (clearing bit 0x2 marks it shared/immutable), otherwise
        // copy-on-write and append.
        if (!other.oracleTables_.isEmpty()) {
          if (oracleTables_.isEmpty()) {
            oracleTables_ = other.oracleTables_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureOracleTablesIsMutable();
            oracleTables_.addAll(other.oracleTables_);
          }
          onChanged();
        }
      } else {
        // Nested-builder mode: an empty builder is dropped in favor of the
        // other list (lazily re-created); otherwise messages are appended.
        if (!other.oracleTables_.isEmpty()) {
          if (oracleTablesBuilder_.isEmpty()) {
            oracleTablesBuilder_.dispose();
            oracleTablesBuilder_ = null;
            oracleTables_ = other.oracleTables_;
            bitField0_ = (bitField0_ & ~0x00000002);
            oracleTablesBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getOracleTablesFieldBuilder()
                    : null;
          } else {
            oracleTablesBuilder_.addAllMessages(other.oracleTables_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    // Proto3 message: no required fields, always initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    /**
     * Parses serialized fields from {@code input} and merges them into this
     * builder. Wire tag 10 (field 1) carries schema_name; tag 18 (field 2,
     * length-delimited) carries an OracleTable message. Unknown fields are
     * preserved.
     */
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                schemaName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.cloud.datastream.v1alpha1.OracleTable m =
                    input.readMessage(
                        com.google.cloud.datastream.v1alpha1.OracleTable.parser(),
                        extensionRegistry);
                if (oracleTablesBuilder_ == null) {
                  ensureOracleTablesIsMutable();
                  oracleTables_.add(m);
                } else {
                  oracleTablesBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Fields may be partially merged when a failure propagates; always
        // invalidate cached state.
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    // Holds a decoded String or the raw ByteString off the wire.
    private java.lang.Object schemaName_ = "";
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema_name = 1;</code>
*
* @return The schemaName.
*/
public java.lang.String getSchemaName() {
java.lang.Object ref = schemaName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
schemaName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Schema name.
* </pre>
*
* <code>string schema_name = 1;</code>
*
* @return The bytes for schemaName.
*/
public com.google.protobuf.ByteString getSchemaNameBytes() {
java.lang.Object ref = schemaName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
schemaName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
    /**
     *
     *
     * <pre>
     * Schema name.
     * </pre>
     *
     * <code>string schema_name = 1;</code>
     *
     * @param value The schemaName to set.
     * @return This builder for chaining.
     */
    public Builder setSchemaName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      schemaName_ = value;
      bitField0_ |= 0x00000001; // mark schema_name as explicitly set
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Schema name.
     * </pre>
     *
     * <code>string schema_name = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearSchemaName() {
      // Reset to the message's default value and drop the has-bit.
      schemaName_ = getDefaultInstance().getSchemaName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Schema name.
     * </pre>
     *
     * <code>string schema_name = 1;</code>
     *
     * @param value The bytes for schemaName to set.
     * @return This builder for chaining.
     */
    public Builder setSchemaNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // proto3 strings must be valid UTF-8; reject malformed input eagerly.
      checkByteStringIsUtf8(value);
      schemaName_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    // Repeated field oracle_tables uses a dual representation: a plain List
    // while no nested builders are in play, or a RepeatedFieldBuilderV3 once
    // getOracleTablesFieldBuilder() has been called. Exactly one of
    // oracleTables_ / oracleTablesBuilder_ is active at a time.
    private java.util.List<com.google.cloud.datastream.v1alpha1.OracleTable> oracleTables_ =
        java.util.Collections.emptyList();
    // Copy-on-write: replaces the (possibly shared/immutable) list with a
    // private ArrayList before the first in-place mutation.
    private void ensureOracleTablesIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        oracleTables_ =
            new java.util.ArrayList<com.google.cloud.datastream.v1alpha1.OracleTable>(
                oracleTables_);
        bitField0_ |= 0x00000002;
      }
    }
    // Lazily created nested-builder view; null until first builder access.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datastream.v1alpha1.OracleTable,
            com.google.cloud.datastream.v1alpha1.OracleTable.Builder,
            com.google.cloud.datastream.v1alpha1.OracleTableOrBuilder>
        oracleTablesBuilder_;
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public java.util.List<com.google.cloud.datastream.v1alpha1.OracleTable> getOracleTablesList() {
      if (oracleTablesBuilder_ == null) {
        return java.util.Collections.unmodifiableList(oracleTables_);
      } else {
        return oracleTablesBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public int getOracleTablesCount() {
      if (oracleTablesBuilder_ == null) {
        return oracleTables_.size();
      } else {
        return oracleTablesBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1alpha1.OracleTable getOracleTables(int index) {
      if (oracleTablesBuilder_ == null) {
        return oracleTables_.get(index);
      } else {
        return oracleTablesBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder setOracleTables(
        int index, com.google.cloud.datastream.v1alpha1.OracleTable value) {
      if (oracleTablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOracleTablesIsMutable();
        oracleTables_.set(index, value);
        onChanged();
      } else {
        oracleTablesBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder setOracleTables(
        int index, com.google.cloud.datastream.v1alpha1.OracleTable.Builder builderForValue) {
      if (oracleTablesBuilder_ == null) {
        ensureOracleTablesIsMutable();
        oracleTables_.set(index, builderForValue.build());
        onChanged();
      } else {
        oracleTablesBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder addOracleTables(com.google.cloud.datastream.v1alpha1.OracleTable value) {
      if (oracleTablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOracleTablesIsMutable();
        oracleTables_.add(value);
        onChanged();
      } else {
        oracleTablesBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder addOracleTables(
        int index, com.google.cloud.datastream.v1alpha1.OracleTable value) {
      if (oracleTablesBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureOracleTablesIsMutable();
        oracleTables_.add(index, value);
        onChanged();
      } else {
        oracleTablesBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder addOracleTables(
        com.google.cloud.datastream.v1alpha1.OracleTable.Builder builderForValue) {
      if (oracleTablesBuilder_ == null) {
        ensureOracleTablesIsMutable();
        oracleTables_.add(builderForValue.build());
        onChanged();
      } else {
        oracleTablesBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder addOracleTables(
        int index, com.google.cloud.datastream.v1alpha1.OracleTable.Builder builderForValue) {
      if (oracleTablesBuilder_ == null) {
        ensureOracleTablesIsMutable();
        oracleTables_.add(index, builderForValue.build());
        onChanged();
      } else {
        oracleTablesBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder addAllOracleTables(
        java.lang.Iterable<? extends com.google.cloud.datastream.v1alpha1.OracleTable> values) {
      if (oracleTablesBuilder_ == null) {
        ensureOracleTablesIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, oracleTables_);
        onChanged();
      } else {
        oracleTablesBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder clearOracleTables() {
      if (oracleTablesBuilder_ == null) {
        // Revert to the shared empty list and clear the mutable-list bit.
        oracleTables_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        oracleTablesBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public Builder removeOracleTables(int index) {
      if (oracleTablesBuilder_ == null) {
        ensureOracleTablesIsMutable();
        oracleTables_.remove(index);
        onChanged();
      } else {
        oracleTablesBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1alpha1.OracleTable.Builder getOracleTablesBuilder(
        int index) {
      return getOracleTablesFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1alpha1.OracleTableOrBuilder getOracleTablesOrBuilder(
        int index) {
      if (oracleTablesBuilder_ == null) {
        return oracleTables_.get(index);
      } else {
        return oracleTablesBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public java.util.List<? extends com.google.cloud.datastream.v1alpha1.OracleTableOrBuilder>
        getOracleTablesOrBuilderList() {
      if (oracleTablesBuilder_ != null) {
        return oracleTablesBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(oracleTables_);
      }
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1alpha1.OracleTable.Builder addOracleTablesBuilder() {
      return getOracleTablesFieldBuilder()
          .addBuilder(com.google.cloud.datastream.v1alpha1.OracleTable.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public com.google.cloud.datastream.v1alpha1.OracleTable.Builder addOracleTablesBuilder(
        int index) {
      return getOracleTablesFieldBuilder()
          .addBuilder(index, com.google.cloud.datastream.v1alpha1.OracleTable.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * Tables in the schema.
     * </pre>
     *
     * <code>repeated .google.cloud.datastream.v1alpha1.OracleTable oracle_tables = 2;</code>
     */
    public java.util.List<com.google.cloud.datastream.v1alpha1.OracleTable.Builder>
        getOracleTablesBuilderList() {
      return getOracleTablesFieldBuilder().getBuilderList();
    }
    // Switches the repeated field into builder mode on first use: constructs
    // the RepeatedFieldBuilderV3 from the current list and nulls out
    // oracleTables_ so the builder becomes the single source of truth.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.datastream.v1alpha1.OracleTable,
            com.google.cloud.datastream.v1alpha1.OracleTable.Builder,
            com.google.cloud.datastream.v1alpha1.OracleTableOrBuilder>
        getOracleTablesFieldBuilder() {
      if (oracleTablesBuilder_ == null) {
        oracleTablesBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.datastream.v1alpha1.OracleTable,
                com.google.cloud.datastream.v1alpha1.OracleTable.Builder,
                com.google.cloud.datastream.v1alpha1.OracleTableOrBuilder>(
                oracleTables_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        oracleTables_ = null;
      }
      return oracleTablesBuilder_;
    }
    // Delegate unknown-field handling to the generated superclass; kept final
    // so subclasses of this Builder cannot alter round-tripping behavior.
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1alpha1.OracleSchema)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1alpha1.OracleSchema)
  // Singleton default instance shared by all callers; created eagerly in the
  // static initializer and returned by getDefaultInstance().
  private static final com.google.cloud.datastream.v1alpha1.OracleSchema DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.datastream.v1alpha1.OracleSchema();
  }
  public static com.google.cloud.datastream.v1alpha1.OracleSchema getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Stateless parser shared across threads. parsePartialFrom delegates to the
  // Builder's mergeFrom and attaches the partially built message to any
  // InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<OracleSchema> PARSER =
      new com.google.protobuf.AbstractParser<OracleSchema>() {
        @java.lang.Override
        public OracleSchema parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf exception type callers expect.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<OracleSchema> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<OracleSchema> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.datastream.v1alpha1.OracleSchema getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== Concatenation boundary: the following content is a separate generated file ====
// ==== repo: googleapis/google-cloud-java ====
// ==== path: java-network-management/proto-google-cloud-network-management-v1beta1/src/main/java/com/google/cloud/networkmanagement/v1beta1/AppEngineVersionInfo.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkmanagement/v1beta1/trace.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkmanagement.v1beta1;
/**
*
*
* <pre>
* For display only. Metadata associated with an App Engine version.
* </pre>
*
* Protobuf type {@code google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo}
*/
public final class AppEngineVersionInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo)
AppEngineVersionInfoOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use AppEngineVersionInfo.newBuilder() to construct.
  private AppEngineVersionInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor initializes every string field to the empty string,
  // the proto3 default.
  private AppEngineVersionInfo() {
    displayName_ = "";
    uri_ = "";
    runtime_ = "";
    environment_ = "";
  }
  // Hook used by the protobuf runtime to create instances reflectively.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new AppEngineVersionInfo();
  }
  // Descriptor and reflection plumbing generated from trace.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkmanagement.v1beta1.TraceProto
        .internal_static_google_cloud_networkmanagement_v1beta1_AppEngineVersionInfo_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkmanagement.v1beta1.TraceProto
        .internal_static_google_cloud_networkmanagement_v1beta1_AppEngineVersionInfo_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo.class,
            com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo.Builder.class);
  }
  public static final int DISPLAY_NAME_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily and cached on first
  // access. volatile so the cached conversion is visible across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object displayName_ = "";
  /**
   *
   *
   * <pre>
   * Name of an App Engine version.
   * </pre>
   *
   * <code>string display_name = 1;</code>
   *
   * @return The displayName.
   */
  @java.lang.Override
  public java.lang.String getDisplayName() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      displayName_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Name of an App Engine version.
   * </pre>
   *
   * <code>string display_name = 1;</code>
   *
   * @return The bytes for displayName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDisplayNameBytes() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      displayName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int URI_FIELD_NUMBER = 2;
  // Same lazy String/ByteString representation as displayName_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object uri_ = "";
  /**
   *
   *
   * <pre>
   * URI of an App Engine version.
   * </pre>
   *
   * <code>string uri = 2;</code>
   *
   * @return The uri.
   */
  @java.lang.Override
  public java.lang.String getUri() {
    java.lang.Object ref = uri_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      uri_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * URI of an App Engine version.
   * </pre>
   *
   * <code>string uri = 2;</code>
   *
   * @return The bytes for uri.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getUriBytes() {
    java.lang.Object ref = uri_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      uri_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int RUNTIME_FIELD_NUMBER = 3;
  // Same lazy String/ByteString representation as displayName_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object runtime_ = "";
  /**
   *
   *
   * <pre>
   * Runtime of the App Engine version.
   * </pre>
   *
   * <code>string runtime = 3;</code>
   *
   * @return The runtime.
   */
  @java.lang.Override
  public java.lang.String getRuntime() {
    java.lang.Object ref = runtime_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      runtime_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Runtime of the App Engine version.
   * </pre>
   *
   * <code>string runtime = 3;</code>
   *
   * @return The bytes for runtime.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getRuntimeBytes() {
    java.lang.Object ref = runtime_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      runtime_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int ENVIRONMENT_FIELD_NUMBER = 4;
  // Same lazy String/ByteString representation as displayName_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object environment_ = "";
  /**
   *
   *
   * <pre>
   * App Engine execution environment for a version.
   * </pre>
   *
   * <code>string environment = 4;</code>
   *
   * @return The environment.
   */
  @java.lang.Override
  public java.lang.String getEnvironment() {
    java.lang.Object ref = environment_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      environment_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * App Engine execution environment for a version.
   * </pre>
   *
   * <code>string environment = 4;</code>
   *
   * @return The bytes for environment.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getEnvironmentBytes() {
    java.lang.Object ref = environment_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      environment_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields to the wire; per proto3 semantics, fields equal to
  // the default (empty string) are omitted entirely.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, uri_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(runtime_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, runtime_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 4, environment_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in memoizedSize) the serialized byte length,
  // mirroring the field-skipping logic of writeTo.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, displayName_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(uri_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, uri_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(runtime_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, runtime_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(environment_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, environment_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo)) {
return super.equals(obj);
}
com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo other =
(com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo) obj;
if (!getDisplayName().equals(other.getDisplayName())) return false;
if (!getUri().equals(other.getUri())) return false;
if (!getRuntime().equals(other.getRuntime())) return false;
if (!getEnvironment().equals(other.getEnvironment())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
  // Hash is memoized after first computation; mixing constants (19/37/53/29)
  // and field order match the protobuf generator's scheme and must stay in
  // sync with equals().
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getDisplayName().hashCode();
    hash = (37 * hash) + URI_FIELD_NUMBER;
    hash = (53 * hash) + getUri().hashCode();
    hash = (37 * hash) + RUNTIME_FIELD_NUMBER;
    hash = (53 * hash) + getRuntime().hashCode();
    hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER;
    hash = (53 * hash) + getEnvironment().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard family of static parse entry points: in-memory forms delegate to
  // PARSER directly; stream forms go through GeneratedMessageV3 helpers that
  // translate I/O failures into the expected exception types.
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a leading varint length prefix before the message.
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. toBuilder() avoids a copy when called on the
  // default instance, since there is no state to merge.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* For display only. Metadata associated with an App Engine version.
* </pre>
*
* Protobuf type {@code google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo)
com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfoOrBuilder {
    // Descriptor and reflection plumbing for the Builder, mirroring the
    // outer message class.
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.networkmanagement.v1beta1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1beta1_AppEngineVersionInfo_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.networkmanagement.v1beta1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1beta1_AppEngineVersionInfo_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo.class,
              com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo.Builder.class);
    }
    // Construct using com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets all fields and has-bits to their defaults.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      displayName_ = "";
      uri_ = "";
      runtime_ = "";
      environment_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.networkmanagement.v1beta1.TraceProto
          .internal_static_google_cloud_networkmanagement_v1beta1_AppEngineVersionInfo_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo
        getDefaultInstanceForType() {
      return com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo.getDefaultInstance();
    }
    // build() enforces initialization; with no required fields this never
    // throws in practice.
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo build() {
      com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo buildPartial() {
      com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo result =
          new com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies only explicitly-set fields (per the has-bits) onto the message.
    private void buildPartial0(
        com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.displayName_ = displayName_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.uri_ = uri_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.runtime_ = runtime_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.environment_ = environment_;
      }
    }
    // Generic reflection-based mutators simply delegate to the superclass;
    // they are overridden to narrow the return type to this Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    // Dispatches to the typed merge when possible, otherwise falls back to
    // the reflective merge in the superclass.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo) {
        return mergeFrom((com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Field-by-field merge: only non-default (non-empty) fields from `other`
    // overwrite this builder's values, per protobuf merge semantics.
    public Builder mergeFrom(
        com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo other) {
      if (other
          == com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo.getDefaultInstance())
        return this;
      if (!other.getDisplayName().isEmpty()) {
        displayName_ = other.displayName_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getUri().isEmpty()) {
        uri_ = other.uri_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (!other.getRuntime().isEmpty()) {
        runtime_ = other.runtime_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getEnvironment().isEmpty()) {
        environment_ = other.environment_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    // No required fields, so the builder is always considered initialized.
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Reads serialized fields from the wire and merges them into this builder.
    // A protobuf tag encodes (field_number << 3) | wire_type, hence:
    //   0  -> end of input (or enclosing group)
    //   10 -> field 1 (display_name)   18 -> field 2 (uri)
    //   26 -> field 3 (runtime)        34 -> field 4 (environment)
    // All four are length-delimited UTF-8 strings; unknown tags are preserved.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                displayName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                uri_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                runtime_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 34:
              {
                environment_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Always notify parents of potential modification, even on error.
        onChanged();
      } // finally
      return this;
    }
// Bit mask recording which builder fields have been explicitly set
// (bit 0 = display_name, bit 1 = uri, bit 2 = runtime, bit 3 = environment).
private int bitField0_;

// Backing store: holds either a java.lang.String or a ByteString that is
// lazily decoded to String (and cached) on first read.
private java.lang.Object displayName_ = "";
/**
 * Name of an App Engine version.
 *
 * <code>string display_name = 1;</code>
 *
 * @return The displayName.
 */
public java.lang.String getDisplayName() {
  java.lang.Object ref = displayName_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent reads skip UTF-8 decoding.
    displayName_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * Name of an App Engine version.
 *
 * <code>string display_name = 1;</code>
 *
 * @return The bytes for displayName.
 */
public com.google.protobuf.ByteString getDisplayNameBytes() {
  java.lang.Object ref = displayName_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent reads.
    displayName_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * Name of an App Engine version.
 *
 * <code>string display_name = 1;</code>
 *
 * @param value The displayName to set.
 * @return This builder for chaining.
 */
public Builder setDisplayName(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  displayName_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 * Name of an App Engine version.
 * Resets the field to its default (empty string).
 *
 * <code>string display_name = 1;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearDisplayName() {
  displayName_ = getDefaultInstance().getDisplayName();
  bitField0_ = (bitField0_ & ~0x00000001);
  onChanged();
  return this;
}
/**
 * Name of an App Engine version.
 *
 * <code>string display_name = 1;</code>
 *
 * @param value The bytes for displayName to set.
 * @return This builder for chaining.
 */
public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  displayName_ = value;
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
// Backing store: holds either a java.lang.String or a ByteString that is
// lazily decoded to String (and cached) on first read.
private java.lang.Object uri_ = "";
/**
 * URI of an App Engine version.
 *
 * <code>string uri = 2;</code>
 *
 * @return The uri.
 */
public java.lang.String getUri() {
  java.lang.Object ref = uri_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent reads skip UTF-8 decoding.
    uri_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * URI of an App Engine version.
 *
 * <code>string uri = 2;</code>
 *
 * @return The bytes for uri.
 */
public com.google.protobuf.ByteString getUriBytes() {
  java.lang.Object ref = uri_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent reads.
    uri_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * URI of an App Engine version.
 *
 * <code>string uri = 2;</code>
 *
 * @param value The uri to set.
 * @return This builder for chaining.
 */
public Builder setUri(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  uri_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * URI of an App Engine version.
 * Resets the field to its default (empty string).
 *
 * <code>string uri = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearUri() {
  uri_ = getDefaultInstance().getUri();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 * URI of an App Engine version.
 *
 * <code>string uri = 2;</code>
 *
 * @param value The bytes for uri to set.
 * @return This builder for chaining.
 */
public Builder setUriBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  uri_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Backing store: holds either a java.lang.String or a ByteString that is
// lazily decoded to String (and cached) on first read.
private java.lang.Object runtime_ = "";
/**
 * Runtime of the App Engine version.
 *
 * <code>string runtime = 3;</code>
 *
 * @return The runtime.
 */
public java.lang.String getRuntime() {
  java.lang.Object ref = runtime_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent reads skip UTF-8 decoding.
    runtime_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * Runtime of the App Engine version.
 *
 * <code>string runtime = 3;</code>
 *
 * @return The bytes for runtime.
 */
public com.google.protobuf.ByteString getRuntimeBytes() {
  java.lang.Object ref = runtime_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent reads.
    runtime_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * Runtime of the App Engine version.
 *
 * <code>string runtime = 3;</code>
 *
 * @param value The runtime to set.
 * @return This builder for chaining.
 */
public Builder setRuntime(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  runtime_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
/**
 * Runtime of the App Engine version.
 * Resets the field to its default (empty string).
 *
 * <code>string runtime = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearRuntime() {
  runtime_ = getDefaultInstance().getRuntime();
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
/**
 * Runtime of the App Engine version.
 *
 * <code>string runtime = 3;</code>
 *
 * @param value The bytes for runtime to set.
 * @return This builder for chaining.
 */
public Builder setRuntimeBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  runtime_ = value;
  bitField0_ |= 0x00000004;
  onChanged();
  return this;
}
// Backing store: holds either a java.lang.String or a ByteString that is
// lazily decoded to String (and cached) on first read.
private java.lang.Object environment_ = "";
/**
 * App Engine execution environment for a version.
 *
 * <code>string environment = 4;</code>
 *
 * @return The environment.
 */
public java.lang.String getEnvironment() {
  java.lang.Object ref = environment_;
  if (!(ref instanceof java.lang.String)) {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    // Cache the decoded String so subsequent reads skip UTF-8 decoding.
    environment_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * App Engine execution environment for a version.
 *
 * <code>string environment = 4;</code>
 *
 * @return The bytes for environment.
 */
public com.google.protobuf.ByteString getEnvironmentBytes() {
  java.lang.Object ref = environment_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    // Cache the encoded ByteString for subsequent reads.
    environment_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * App Engine execution environment for a version.
 *
 * <code>string environment = 4;</code>
 *
 * @param value The environment to set.
 * @return This builder for chaining.
 */
public Builder setEnvironment(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  environment_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/**
 * App Engine execution environment for a version.
 * Resets the field to its default (empty string).
 *
 * <code>string environment = 4;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearEnvironment() {
  environment_ = getDefaultInstance().getEnvironment();
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}
/**
 * App Engine execution environment for a version.
 *
 * <code>string environment = 4;</code>
 *
 * @param value The bytes for environment to set.
 * @return This builder for chaining.
 */
public Builder setEnvironmentBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  environment_ = value;
  bitField0_ |= 0x00000008;
  onChanged();
  return this;
}
/** Replaces the unknown-field set; delegates to the generated superclass. */
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

/** Merges additional unknown fields; delegates to the generated superclass. */
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo)
// Shared immutable default instance (all fields empty), created eagerly at class load.
private static final com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo
    DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo();
}

/** Returns the singleton default (empty) instance of this message type. */
public static com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless parser singleton; parsing is delegated to Builder.mergeFrom.
private static final com.google.protobuf.Parser<AppEngineVersionInfo> PARSER =
    new com.google.protobuf.AbstractParser<AppEngineVersionInfo>() {
      @java.lang.Override
      public AppEngineVersionInfo parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so callers can inspect the partial message.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** Returns the shared parser for this message type. */
public static com.google.protobuf.Parser<AppEngineVersionInfo> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<AppEngineVersionInfo> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.networkmanagement.v1beta1.AppEngineVersionInfo
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pdfbox.pdmodel.interactive.annotation.handlers;
import java.awt.geom.AffineTransform;
import java.awt.geom.Ellipse2D;
import java.awt.geom.PathIterator;
import java.awt.geom.Point2D;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.PDAppearanceContentStream;
/**
* Generates annotation appearances with a cloudy border.
* <p>
* Dashed stroke styles are not recommended with cloudy borders. The result would
* not look good because some parts of the arcs are traced twice by the stroked
* path. Actually Acrobat Reader's line style dialog does not allow to choose a
* dashed and a cloudy style at the same time.
*/
class CloudyBorder
{
// Frequently used angles, precomputed in radians. The 34/30/12 degree values
// control the shape of the individual cloud curls (see the curl methods below).
private static final double ANGLE_180_DEG = Math.PI;
private static final double ANGLE_90_DEG = Math.PI / 2;
private static final double ANGLE_34_DEG = Math.toRadians(34);
private static final double ANGLE_30_DEG = Math.toRadians(30);
private static final double ANGLE_12_DEG = Math.toRadians(12);

// Destination for the generated path operators.
private final PDAppearanceContentStream output;
// Annotation Rect entry; may be null (see getRectDifference()).
private final PDRectangle annotRect;
// Cloud intensity (border effect entry I).
private final double intensity;
// Border line width (entry W).
private final double lineWidth;
// Rect after the RD rectangle differences have been applied; set by the
// createCloudy* entry points.
private PDRectangle rectWithDiff;
// True once the first path segment has been emitted (controls moveTo emission).
private boolean outputStarted = false;
// Accumulated bounding box of everything drawn so far.
private double bboxMinX;
private double bboxMinY;
private double bboxMaxX;
private double bboxMaxY;
/**
 * Creates a new {@code CloudyBorder} that writes its appearance to the
 * specified content stream.
 *
 * @param stream content stream that receives the generated path operators
 * @param intensity intensity of cloudy effect (entry <code>I</code>); typically 1.0 or 2.0
 * @param lineWidth line width for annotation border (entry <code>W</code>)
 * @param rect annotation rectangle (entry <code>Rect</code>)
 */
CloudyBorder(PDAppearanceContentStream stream, double intensity,
    double lineWidth, PDRectangle rect)
{
    output = stream;
    annotRect = rect;
    this.intensity = intensity;
    this.lineWidth = lineWidth;
}
/**
 * Creates a cloudy border for a rectangular annotation. The rectangle is
 * derived from the <code>Rect</code> entry given to the constructor, inset by
 * the <code>RD</code> entry and half the line width.
 * <p>
 * Usable for Square and FreeText annotations; it does not produce the text or
 * the callout line of a FreeText.
 *
 * @param rd entry <code>RD</code>, or null if the entry does not exist
 * @throws IOException If there is an error writing to the stream.
 */
void createCloudyRectangle(PDRectangle rd) throws IOException
{
    PDRectangle inner = applyRectDiff(rd, (float) (lineWidth / 2));
    rectWithDiff = inner;
    cloudyRectangleImpl(inner.getLowerLeftX(), inner.getLowerLeftY(),
        inner.getUpperRightX(), inner.getUpperRightY(), false);
    finish();
}
/**
 * Creates a cloudy border for a Polygon annotation.
 *
 * @param path polygon path; each entry is either a 2-element point or a
 * 6-element curve segment (of which only the end point is used)
 * @throws IOException If there is an error writing to the stream.
 */
void createCloudyPolygon(float[][] path) throws IOException
{
    Point2D.Double[] polygon = new Point2D.Double[path.length];
    for (int i = 0; i < path.length; i++)
    {
        float[] seg = path[i];
        switch (seg.length)
        {
            case 2:
                polygon[i] = new Point2D.Double(seg[0], seg[1]);
                break;
            case 6:
                // TODO Curve segments are not yet supported in cloudy border;
                // only the curve's end point is used.
                polygon[i] = new Point2D.Double(seg[4], seg[5]);
                break;
            default:
                break;
        }
    }
    cloudyPolygonImpl(polygon, false);
    finish();
}
/**
 * Creates a cloudy border for a Circle annotation. The ellipse is derived from
 * the <code>Rect</code> entry given to the constructor, inset by the
 * <code>RD</code> entry.
 *
 * @param rd entry <code>RD</code>, or null if the entry does not exist
 * @throws IOException If there is an error writing to the stream.
 */
void createCloudyEllipse(PDRectangle rd) throws IOException
{
    PDRectangle inner = applyRectDiff(rd, 0);
    rectWithDiff = inner;
    cloudyEllipseImpl(inner.getLowerLeftX(), inner.getLowerLeftY(),
        inner.getUpperRightX(), inner.getUpperRightY());
    finish();
}
/**
 * Returns the <code>BBox</code> entry (bounding box) for the
 * appearance stream form XObject.
 * <p>
 * Identical to {@link #getRectangle()}: the appearance covers exactly the
 * expanded annotation rectangle.
 *
 * @return Bounding box for appearance stream form XObject.
 */
PDRectangle getBBox()
{
    return getRectangle();
}
/**
 * Returns the updated <code>Rect</code> entry for the annotation.
 * The rectangle completely contains the cloudy border.
 *
 * @return Annotation <code>Rect</code>.
 */
PDRectangle getRectangle()
{
    float x = (float) bboxMinX;
    float y = (float) bboxMinY;
    float width = (float) (bboxMaxX - bboxMinX);
    float height = (float) (bboxMaxY - bboxMinY);
    return new PDRectangle(x, y, width, height);
}
/**
 * Returns the <code>Matrix</code> entry for the appearance stream form XObject.
 * It translates the lower left corner of the bounding box to the origin.
 *
 * @return Matrix for appearance stream form XObject.
 */
AffineTransform getMatrix()
{
    AffineTransform transform = new AffineTransform();
    transform.translate(-bboxMinX, -bboxMinY);
    return transform;
}
/**
 * Returns the updated <code>RD</code> entry for Square and Circle annotations:
 * the insets of the original rectangle relative to the expanded bounding box.
 *
 * @return Annotation <code>RD</code> value.
 */
PDRectangle getRectDifference()
{
    if (annotRect == null)
    {
        float half = (float) lineWidth / 2;
        return new PDRectangle(half, half, (float) lineWidth, (float) lineWidth);
    }

    PDRectangle inner = (rectWithDiff != null) ? rectWithDiff : annotRect;
    float diffLeft = inner.getLowerLeftX() - (float) bboxMinX;
    float diffBottom = inner.getLowerLeftY() - (float) bboxMinY;
    float diffRight = (float) bboxMaxX - inner.getUpperRightX();
    float diffTop = (float) bboxMaxY - inner.getUpperRightY();
    return new PDRectangle(diffLeft, diffBottom, diffRight - diffLeft, diffTop - diffBottom);
}
/** Returns dx / hypot, or 0 for a zero-length (degenerate) segment. */
private static double cosine(double dx, double hypot)
{
    return (Double.compare(hypot, 0.0) == 0) ? 0 : dx / hypot;
}
/** Returns dy / hypot, or 0 for a zero-length (degenerate) segment. */
private static double sine(double dy, double hypot)
{
    return (Double.compare(hypot, 0.0) == 0) ? 0 : dy / hypot;
}
/**
 * Cloudy rectangle implementation, based on converting the rectangle to a
 * closed polygon and delegating to the polygon implementation.
 */
private void cloudyRectangleImpl(double left, double bottom,
    double right, double top, boolean isEllipse) throws IOException
{
    double width = right - left;
    double height = top - bottom;

    if (intensity <= 0.0)
    {
        // No cloud effect requested: draw a plain rectangle.
        output.addRect((float) left, (float) bottom, (float) width, (float) height);
        bboxMinX = left;
        bboxMinY = bottom;
        bboxMaxX = right;
        bboxMaxY = top;
        return;
    }

    // Build a closed polygon whose direction equals the positive angle
    // direction. Very thin rectangles collapse to a two-point "line" polygon.
    ArrayList<Point2D.Double> vertices = new ArrayList<>();
    vertices.add(new Point2D.Double(left, bottom));
    if (width < 1.0)
    {
        vertices.add(new Point2D.Double(left, top));
    }
    else if (height < 1.0)
    {
        vertices.add(new Point2D.Double(right, bottom));
    }
    else
    {
        vertices.add(new Point2D.Double(right, bottom));
        vertices.add(new Point2D.Double(right, top));
        vertices.add(new Point2D.Double(left, top));
    }
    vertices.add(new Point2D.Double(left, bottom));

    cloudyPolygonImpl(vertices.toArray(new Point2D.Double[0]), isEllipse);
}
/**
 * Cloudy polygon implementation: walks the polygon segments and draws a
 * sequence of circular "curls" (arc pairs) along them.
 *
 * @param vertices polygon vertices; first and last point must be equal
 * @param isEllipse specifies if the polygon represents an ellipse (affects
 * the cloud radius used)
 */
private void cloudyPolygonImpl(Point2D.Double[] vertices, boolean isEllipse)
    throws IOException
{
    Point2D.Double[] polygon = removeZeroLengthSegments(vertices);
    getPositivePolygon(polygon);
    int numPoints = polygon.length;

    if (numPoints < 2)
    {
        return;
    }
    if (intensity <= 0.0)
    {
        // No cloud effect: trace the plain polygon outline.
        moveTo(polygon[0]);
        for (int i = 1; i < numPoints; i++)
        {
            lineTo(polygon[i]);
        }
        return;
    }

    double cloudRadius = isEllipse ? getEllipseCloudRadius() : getPolygonCloudRadius();
    if (cloudRadius < 0.5)
    {
        cloudRadius = 0.5;
    }

    // Each curl advances 2*k*r along the segment; a corner curl advances half that.
    final double k = Math.cos(ANGLE_34_DEG);
    final double advIntermDefault = 2 * k * cloudRadius;
    final double advCornerDefault = k * cloudRadius;

    // Output parameters (alpha, dx) of computeParamsPolygon.
    double[] array = new double[2];
    double anglePrev = 0;

    // The number of curls per polygon segment is hardly ever an integer,
    // so the length of some curls must be adjustable. We adjust the angle
    // of the trailing arc of corner curls and the leading arc of the first
    // intermediate curl.
    // In each polygon segment, we have n intermediate curls plus one half of a
    // corner curl at each end. One of the n intermediate curls is adjustable.
    // Thus the number of fixed (or unadjusted) intermediate curls is n - 1.

    // Find the adjusted angle `alpha` for the first corner curl.
    int n0 = computeParamsPolygon(advIntermDefault, advCornerDefault, k, cloudRadius,
        polygon[numPoints - 2].distance(polygon[0]), array);
    double alphaPrev = (n0 == 0) ? array[0] : ANGLE_34_DEG;

    for (int j = 0; j + 1 < numPoints; j++)
    {
        Point2D.Double pt = polygon[j];
        Point2D.Double ptNext = polygon[j + 1];
        double length = pt.distance(ptNext);
        if (Double.compare(length, 0.0) == 0)
        {
            alphaPrev = ANGLE_34_DEG;
            continue;
        }

        // n is the number of intermediate curls in the current polygon segment.
        int n = computeParamsPolygon(advIntermDefault, advCornerDefault, k,
            cloudRadius, length, array);
        if (n < 0)
        {
            // Degenerate segment; just establish a current point if needed.
            if (!outputStarted)
            {
                moveTo(pt);
            }
            continue;
        }

        double alpha = array[0];
        double dx = array[1];
        double angleCur = Math.atan2(ptNext.y - pt.y, ptNext.x - pt.x);

        if (j == 0)
        {
            // Direction of the incoming (closing) segment for the first corner.
            Point2D.Double ptPrev = polygon[numPoints - 2];
            anglePrev = Math.atan2(pt.y - ptPrev.y, pt.x - ptPrev.x);
        }

        double cos = cosine(ptNext.x - pt.x, length);
        double sin = sine(ptNext.y - pt.y, length);
        double x = pt.x;
        double y = pt.y;

        addCornerCurl(anglePrev, angleCur, cloudRadius, pt.x, pt.y, alpha,
            alphaPrev, !outputStarted);

        // Proceed to the center point of the first intermediate curl.
        double adv = 2 * k * cloudRadius + 2 * dx;
        x += adv * cos;
        y += adv * sin;

        // Create the first intermediate curl.
        int numInterm = n;
        if (n >= 1)
        {
            addFirstIntermediateCurl(angleCur, cloudRadius, alpha, x, y);
            x += advIntermDefault * cos;
            y += advIntermDefault * sin;
            numInterm = n - 1;
        }

        // Create one intermediate curl and replicate it along the polygon segment.
        Point2D.Double[] template = getIntermediateCurlTemplate(angleCur, cloudRadius);
        for (int i = 0; i < numInterm; i++)
        {
            outputCurlTemplate(template, x, y);
            x += advIntermDefault * cos;
            y += advIntermDefault * sin;
        }

        anglePrev = angleCur;
        alphaPrev = (n == 0) ? alpha : ANGLE_34_DEG;
    }
}
/**
 * Computes parameters for a cloudy polygon segment.
 *
 * @param advInterm advance per intermediate curl
 * @param advCorner advance per corner (half) curl
 * @param k cos(34 degrees)
 * @param r cloud radius
 * @param length length of the polygon segment
 * @param array output: [0] = adjusted arc angle alpha, [1] = fitting error dx
 * @return number of intermediate curls, or -1 for a zero-length segment
 */
private int computeParamsPolygon(double advInterm, double advCorner, double k,
    double r, double length, double[] array)
{
    if (Double.compare(length, 0.0) == 0)
    {
        array[0] = ANGLE_34_DEG;
        array[1] = 0;
        return -1;
    }

    // Number of intermediate curls that fit in the current polygon segment.
    int numInterm = (int) Math.ceil((length - 2 * advCorner) / advInterm);
    // Fitting error along the segment, distributed over two adjustable half curls.
    double fitError = length - (2 * advCorner + numInterm * advInterm);
    double dx = fitError / 2;

    // Convert the fitting error to an angle usable for arc control.
    double cosArg = (k * r + dx) / r;
    double alpha;
    if (cosArg < -1.0 || cosArg > 1.0)
    {
        alpha = 0.0;
    }
    else
    {
        alpha = Math.acos(cosArg);
    }

    array[0] = alpha;
    array[1] = dx;
    return numInterm;
}
/**
 * Creates a corner curl for polygons and ellipses: a short fixed arc followed
 * by an arc that sweeps to the outgoing segment's direction.
 */
private void addCornerCurl(double anglePrev, double angleCur, double radius,
    double cx, double cy, double alpha, double alphaPrev, boolean addMoveTo)
    throws IOException
{
    double start = anglePrev + ANGLE_180_DEG + alphaPrev;
    double mid = start - Math.toRadians(22);
    getArcSegment(start, mid, cx, cy, radius, radius, null, addMoveTo);
    getArc(mid, angleCur - alpha, radius, radius, cx, cy, null, false);
}
/**
 * Generates the first intermediate curl for a cloudy polygon. Its leading arc
 * is adjusted by {@code alpha} to absorb the segment's fitting error.
 */
private void addFirstIntermediateCurl(double angleCur, double r, double alpha,
    double cx, double cy) throws IOException
{
    double base = angleCur + ANGLE_180_DEG;
    double lead = base + alpha;
    getArcSegment(lead, lead - ANGLE_30_DEG, cx, cy, r, r, null, false);
    getArcSegment(lead - ANGLE_30_DEG, base + ANGLE_90_DEG, cx, cy, r, r, null, false);
    getArcSegment(base + ANGLE_90_DEG, base + ANGLE_180_DEG - ANGLE_34_DEG,
        cx, cy, r, r, null, false);
}
/**
 * Returns a curl template (Bezier control points relative to the curl center)
 * for the intermediate curls of a cloudy polygon.
 */
private Point2D.Double[] getIntermediateCurlTemplate(double angleCur, double r)
    throws IOException
{
    ArrayList<Point2D.Double> controlPoints = new ArrayList<>();
    double base = angleCur + ANGLE_180_DEG;
    getArcSegment(base + ANGLE_34_DEG, base + ANGLE_12_DEG, 0, 0, r, r, controlPoints, false);
    getArcSegment(base + ANGLE_12_DEG, base + ANGLE_90_DEG, 0, 0, r, r, controlPoints, false);
    getArcSegment(base + ANGLE_90_DEG, base + ANGLE_180_DEG - ANGLE_34_DEG,
        0, 0, r, r, controlPoints, false);
    return controlPoints.toArray(new Point2D.Double[0]);
}
/**
 * Writes the curl template points to the output, translated by (x, y).
 * A leading single point (template length ≡ 1 mod 3) becomes a moveTo; the
 * remaining points are consumed in triples as Bezier control points.
 */
private void outputCurlTemplate(Point2D.Double[] template, double x, double y)
    throws IOException
{
    int i = 0;
    if (template.length % 3 == 1)
    {
        moveTo(template[0].x + x, template[0].y + y);
        i = 1;
    }
    while (i + 2 < template.length)
    {
        Point2D.Double c1 = template[i++];
        Point2D.Double c2 = template[i++];
        Point2D.Double end = template[i++];
        curveTo(c1.x + x, c1.y + y, c2.x + x, c2.y + y, end.x + x, end.y + y);
    }
}
/**
 * Applies the <code>RD</code> (rectangle difference) entry to the annotation
 * rectangle and returns the resulting inner rectangle. Each RD component is
 * clamped to be at least {@code min}.
 *
 * @param rd entry <code>RD</code>, or null (then {@code min} is used on all sides)
 * @param min minimum inset applied on every side
 * @return annotation rectangle shrunk by the rectangle differences
 */
private PDRectangle applyRectDiff(PDRectangle rd, float min)
{
    float rectLeft = annotRect.getLowerLeftX();
    float rectBottom = annotRect.getLowerLeftY();
    float rectRight = annotRect.getUpperRightX();
    float rectTop = annotRect.getUpperRightY();

    // Normalize so that left <= right and bottom <= top. Compute min and max
    // from the original values: the previous in-place sequence
    // (rectLeft = min(...); then rectRight = max(rectLeft, rectRight);) read
    // the already-overwritten rectLeft and lost the larger coordinate whenever
    // Rect was not normalized, collapsing the rectangle.
    float left = Math.min(rectLeft, rectRight);
    float right = Math.max(rectLeft, rectRight);
    float bottom = Math.min(rectBottom, rectTop);
    float top = Math.max(rectBottom, rectTop);

    float rdLeft = min;
    float rdBottom = min;
    float rdRight = min;
    float rdTop = min;
    if (rd != null)
    {
        rdLeft = Math.max(rd.getLowerLeftX(), min);
        rdBottom = Math.max(rd.getLowerLeftY(), min);
        rdRight = Math.max(rd.getUpperRightX(), min);
        rdTop = Math.max(rd.getUpperRightY(), min);
    }

    left += rdLeft;
    bottom += rdBottom;
    right -= rdRight;
    top -= rdTop;
    return new PDRectangle(left, bottom, right - left, top - bottom);
}
/** Reverses the point array in place (two-pointer swap). */
private void reversePolygon(Point2D.Double[] points)
{
    int lo = 0;
    int hi = points.length - 1;
    while (lo < hi)
    {
        Point2D.Double tmp = points[lo];
        points[lo] = points[hi];
        points[hi] = tmp;
        lo++;
        hi--;
    }
}
/**
 * Ensures (in place) that the polygon's winding matches the positive angle
 * direction of the coordinate system. The polygon must not intersect itself.
 */
private void getPositivePolygon(Point2D.Double[] points)
{
    boolean negativeWinding = getPolygonDirection(points) < 0;
    if (negativeWinding)
    {
        reversePolygon(points);
    }
}
/**
 * Returns the winding direction of the specified polygon using the
 * "shoelace formula": positive means the polygon follows the positive angle
 * direction of the coordinate system, negative means the opposite.
 * <p>
 * The polygon must not intersect itself; a 2-point polygon is not acceptable.
 */
private double getPolygonDirection(Point2D.Double[] points)
{
    int len = points.length;
    if (len == 0)
    {
        return 0;
    }
    double sum = 0;
    Point2D.Double prev = points[len - 1];
    for (Point2D.Double cur : points)
    {
        sum += prev.x * cur.y - prev.y * cur.x;
        prev = cur;
    }
    return sum;
}
/**
 * Creates one or more Bezier curves that represent an elliptical arc.
 * Angles are in radians.
 * The arc will always proceed in the positive angle direction; a negative
 * sweep is wrapped by adding 2*pi.
 * If the argument `out` is null, this writes the results to the instance
 * variable `output`.
 */
private void getArc(double startAng, double endAng, double rx, double ry,
    double cx, double cy, ArrayList<Point2D.Double> out, boolean addMoveTo) throws IOException
{
    // Each emitted Bezier segment covers at most 90 degrees.
    final double angleIncr = Math.PI / 2;
    double startx = rx * Math.cos(startAng) + cx;
    double starty = ry * Math.sin(startAng) + cy;
    double angleTodo = endAng - startAng;
    while (angleTodo < 0)
    {
        angleTodo += 2 * Math.PI;
    }
    double sweep = angleTodo;
    double angleDone = 0;

    if (addMoveTo)
    {
        if (out != null)
        {
            out.add(new Point2D.Double(startx, starty));
        }
        else
        {
            moveTo(startx, starty);
        }
    }

    // Emit full 90-degree segments, then one final partial segment.
    while (angleTodo > angleIncr)
    {
        getArcSegment(startAng + angleDone,
            startAng + angleDone + angleIncr, cx, cy, rx, ry, out, false);
        angleDone += angleIncr;
        angleTodo -= angleIncr;
    }
    if (angleTodo > 0)
    {
        getArcSegment(startAng + angleDone, startAng + sweep, cx, cy, rx, ry, out, false);
    }
}
/**
 * Creates a single Bezier curve that represents a section of an elliptical
 * arc. The sweep angle of the section must not be larger than 90 degrees.
 * If argument `out` is null, this writes the results to the instance
 * variable `output`; otherwise the start point (when addMoveTo is set) and
 * the three Bezier control points are appended to `out`.
 */
private void getArcSegment(double startAng, double endAng, double cx, double cy,
    double rx, double ry, ArrayList<Point2D.Double> out, boolean addMoveTo) throws IOException
{
    // Algorithm is from the FAQ of the news group comp.text.pdf

    double cosA = Math.cos(startAng);
    double sinA = Math.sin(startAng);
    double cosB = Math.cos(endAng);
    double sinB = Math.sin(endAng);
    double denom = Math.sin((endAng - startAng) / 2.0);
    if (Double.compare(denom, 0.0) == 0)
    {
        // This can happen only if endAng == startAng.
        // The arc sweep angle is zero, so we create no arc at all.
        if (addMoveTo)
        {
            double xs = cx + rx * cosA;
            double ys = cy + ry * sinA;
            if (out != null)
            {
                out.add(new Point2D.Double(xs, ys));
            }
            else
            {
                moveTo(xs, ys);
            }
        }
        return;
    }
    // Control point distance factor (4/3 approximation for a Bezier arc).
    double bcp = 1.333333333 * (1 - Math.cos((endAng - startAng) / 2.0)) / denom;

    double p1x = cx + rx * (cosA - bcp * sinA);
    double p1y = cy + ry * (sinA + bcp * cosA);
    double p2x = cx + rx * (cosB + bcp * sinB);
    double p2y = cy + ry * (sinB - bcp * cosB);
    double p3x = cx + rx * cosB;
    double p3y = cy + ry * sinB;

    if (addMoveTo)
    {
        double xs = cx + rx * cosA;
        double ys = cy + ry * sinA;
        if (out != null)
        {
            out.add(new Point2D.Double(xs, ys));
        }
        else
        {
            moveTo(xs, ys);
        }
    }

    if (out != null)
    {
        out.add(new Point2D.Double(p1x, p1y));
        out.add(new Point2D.Double(p2x, p2y));
        out.add(new Point2D.Double(p3x, p3y));
    }
    else
    {
        curveTo(p1x, p1y, p2x, p2y, p3x, p3y);
    }
}
/**
 * Flattens an ellipse into a polygon.
 *
 * @param left left edge of the ellipse's bounding box
 * @param bottom bottom edge
 * @param right right edge
 * @param top top edge
 * @return closed polygon approximating the ellipse; the last point equals the
 * first, as required by {@code cloudyPolygonImpl}
 */
private static Point2D.Double[] flattenEllipse(double left, double bottom,
    double right, double top)
{
    Ellipse2D.Double ellipse = new Ellipse2D.Double(left, bottom, right - left, top - bottom);
    final double flatness = 0.50;
    PathIterator iterator = ellipse.getPathIterator(null, flatness);
    double[] coords = new double[6];
    ArrayList<Point2D.Double> points = new ArrayList<>();
    while (!iterator.isDone())
    {
        switch (iterator.currentSegment(coords))
        {
            case PathIterator.SEG_MOVETO:
            case PathIterator.SEG_LINETO:
                points.add(new Point2D.Double(coords[0], coords[1]));
                break;
            // Curve segments are not expected because the path iterator is
            // flattened. SEG_CLOSE can be ignored.
            default:
                break;
        }
        iterator.next();
    }

    // Close the polygon explicitly if needed: downstream code requires the
    // first and last points to be equal. (The previous code appended the
    // *last* point again, which left the polygon open.)
    int size = points.size();
    final double closeTestLimit = 0.05;
    if (size >= 2 && points.get(size - 1).distance(points.get(0)) > closeTestLimit)
    {
        Point2D.Double first = points.get(0);
        points.add(new Point2D.Double(first.x, first.y));
    }

    return points.toArray(Point2D.Double[]::new);
}
/**
* Cloudy ellipse implementation.
*/
private void cloudyEllipseImpl(final double leftOrig, final double bottomOrig,
final double rightOrig, final double topOrig) throws IOException
{
if (intensity <= 0.0)
{
drawBasicEllipse(leftOrig, bottomOrig, rightOrig, topOrig);
return;
}
double left = leftOrig;
double bottom = bottomOrig;
double right = rightOrig;
double top = topOrig;
double width = right - left;
double height = top - bottom;
double cloudRadius = getEllipseCloudRadius();
// Omit cloudy border if the ellipse is very small.
final double threshold1 = 0.50 * cloudRadius;
if (width < threshold1 && height < threshold1)
{
drawBasicEllipse(left, bottom, right, top);
return;
}
// Draw a cloudy rectangle instead of an ellipse when the
// width or height is very small.
final double threshold2 = 5;
if ((width < threshold2 && height > 20) || (width > 20 && height < threshold2))
{
cloudyRectangleImpl(left, bottom, right, top, true);
return;
}
// Decrease radii (while center point does not move). This makes the
// "tails" of the curls almost touch the ellipse outline.
double radiusAdj = Math.sin(ANGLE_12_DEG) * cloudRadius - 1.50;
if (width > 2 * radiusAdj)
{
left += radiusAdj;
right -= radiusAdj;
}
else
{
double mid = (left + right) / 2;
left = mid - 0.10;
right = mid + 0.10;
}
if (height > 2 * radiusAdj)
{
top -= radiusAdj;
bottom += radiusAdj;
}
else
{
double mid = (top + bottom) / 2;
top = mid + 0.10;
bottom = mid - 0.10;
}
// Flatten the ellipse into a polygon. The segment lengths of the flattened
// result don't need to be extremely short because the loop below is able to
// interpolate between polygon points when it computes the center points
// at which each curl is placed.
Point2D.Double[] flatPolygon = flattenEllipse(left, bottom, right, top);
int numPoints = flatPolygon.length;
if (numPoints < 2)
{
return;
}
double totLen = 0;
for(int i = 1; i < numPoints; i++){
totLen += flatPolygon[i - 1].distance(flatPolygon[i]);
}
final double k = Math.cos(ANGLE_34_DEG);
double curlAdvance = 2 * k * cloudRadius;
int n = (int) Math.ceil(totLen / curlAdvance);
if (n < 2)
{
drawBasicEllipse(leftOrig, bottomOrig, rightOrig, topOrig);
return;
}
curlAdvance = totLen / n;
cloudRadius = curlAdvance / (2 * k);
if (cloudRadius < 0.5)
{
cloudRadius = 0.5;
curlAdvance = 2 * k * cloudRadius;
}
else if (cloudRadius < 3.0)
{
// Draw a small circle when the scaled radius becomes very small.
// This happens also if intensity is much smaller than 1.
drawBasicEllipse(leftOrig, bottomOrig, rightOrig, topOrig);
return;
}
// Construct centerPoints array, in which each point is the center point of a curl.
// The length of each centerPoints segment ideally equals curlAdv but that
// is not true in regions where the ellipse curvature is high.
int centerPointsLength = n;
Point2D.Double[] centerPoints = new Point2D.Double[centerPointsLength];
int centerPointsIndex = 0;
double lengthRemain = 0;
final double comparisonToler = lineWidth * 0.10;
for (int i = 0; i + 1 < numPoints; i++)
{
Point2D.Double p1 = flatPolygon[i];
Point2D.Double p2 = flatPolygon[i + 1];
double dx = p2.x - p1.x;
double dy = p2.y - p1.y;
double length = p1.distance(p2);
if (Double.compare(length, 0.0) == 0)
{
continue;
}
double lengthTodo = length + lengthRemain;
if (lengthTodo >= curlAdvance - comparisonToler || i == numPoints - 2)
{
double cos = cosine(dx, length);
double sin = sine(dy, length);
double d = curlAdvance - lengthRemain;
do
{
double x = p1.x + d * cos;
double y = p1.y + d * sin;
if (centerPointsIndex < centerPointsLength)
{
centerPoints[centerPointsIndex++] = new Point2D.Double(x, y);
}
lengthTodo -= curlAdvance;
d += curlAdvance;
}
while (lengthTodo >= curlAdvance - comparisonToler);
lengthRemain = lengthTodo;
if (lengthRemain < 0)
{
lengthRemain = 0;
}
}
else
{
lengthRemain += length;
}
}
// Note: centerPoints does not repeat the first point as the last point
// to create a "closing" segment.
// Place a curl at each point of the centerPoints array.
// In regions where the ellipse curvature is high, the centerPoints segments
// are shorter than the actual distance along the ellipse. Thus we must
// again compute arc adjustments like in cloudy polygons.
numPoints = centerPointsIndex;
double anglePrev = 0;
double alphaPrev = 0;
for (int i = 0; i < numPoints; i++)
{
int idxNext = i + 1;
if (i + 1 >= numPoints)
{
idxNext = 0;
}
Point2D.Double pt = centerPoints[i];
Point2D.Double ptNext = centerPoints[idxNext];
if (i == 0)
{
Point2D.Double ptPrev = centerPoints[numPoints - 1];
anglePrev = Math.atan2(pt.y - ptPrev.y, pt.x - ptPrev.x);
alphaPrev = computeParamsEllipse(ptPrev, pt, cloudRadius, curlAdvance);
}
double angleCur = Math.atan2(ptNext.y - pt.y, ptNext.x - pt.x);
double alpha = computeParamsEllipse(pt, ptNext, cloudRadius, curlAdvance);
addCornerCurl(anglePrev, angleCur, cloudRadius, pt.x, pt.y, alpha,
alphaPrev, !outputStarted);
anglePrev = angleCur;
alphaPrev = alpha;
}
}
/**
* Computes the alpha parameter for an ellipse curl.
*/
private double computeParamsEllipse(Point2D.Double pt, Point2D.Double ptNext,
double r, double curlAdv)
{
double length = pt.distance(ptNext);
if (Double.compare(length, 0.0) == 0)
{
return ANGLE_34_DEG;
}
double e = length - curlAdv;
double arg = (curlAdv / 2 + e / 2) / r;
return (arg < -1.0 || arg > 1.0) ? 0.0 : Math.acos(arg);
}
private Point2D.Double[] removeZeroLengthSegments(Point2D.Double[] polygon)
{
int np = polygon.length;
if (np <= 2)
{
return polygon;
}
final double toler = 0.50;
int npNew = np;
Point2D.Double ptPrev = polygon[0];
// Don't remove the last point if it equals the first point.
for (int i = 1; i < np; i++)
{
Point2D.Double pt = polygon[i];
if (Math.abs(pt.x - ptPrev.x) < toler && Math.abs(pt.y - ptPrev.y) < toler)
{
polygon[i] = null;
npNew--;
}
ptPrev = pt;
}
if (npNew == np)
{
return polygon;
}
Point2D.Double[] polygonNew = new Point2D.Double[npNew];
int j = 0;
for (int i = 0; i < np; i++)
{
Point2D.Double pt = polygon[i];
if (pt != null)
{
polygonNew[j++] = pt;
}
}
return polygonNew;
}
/**
* Draws an ellipse without a cloudy border effect.
*/
private void drawBasicEllipse(double left, double bottom, double right, double top)
throws IOException
{
double rx = Math.abs(right - left) / 2;
double ry = Math.abs(top - bottom) / 2;
double cx = (left + right) / 2;
double cy = (bottom + top) / 2;
getArc(0, 2 * Math.PI, rx, ry, cx, cy, null, true);
}
private void beginOutput(double x, double y) throws IOException
{
bboxMinX = x;
bboxMinY = y;
bboxMaxX = x;
bboxMaxY = y;
outputStarted = true;
// Set line join to bevel to avoid spikes
output.setLineJoinStyle(2);
}
private void updateBBox(double x, double y)
{
bboxMinX = Math.min(bboxMinX, x);
bboxMinY = Math.min(bboxMinY, y);
bboxMaxX = Math.max(bboxMaxX, x);
bboxMaxY = Math.max(bboxMaxY, y);
}
    // Convenience overload: moves the current path position to the given point.
    private void moveTo(Point2D.Double p) throws IOException
    {
        moveTo(p.x, p.y);
    }
private void moveTo(double x, double y) throws IOException
{
if (outputStarted)
{
updateBBox(x, y);
}
else
{
beginOutput(x, y);
}
output.moveTo((float)x, (float)y);
}
    // Convenience overload: draws a line to the given point.
    private void lineTo(Point2D.Double p) throws IOException
    {
        lineTo(p.x, p.y);
    }
private void lineTo(double x, double y) throws IOException
{
if (outputStarted)
{
updateBBox(x, y);
}
else
{
beginOutput(x, y);
}
output.lineTo((float)x, (float)y);
}
    /**
     * Emits a cubic Bezier curveto with control points (ax, ay) and (bx, by)
     * ending at (cx, cy); all three points grow the bounding box.
     * Unlike moveTo/lineTo this does not start the output, so a prior
     * moveTo/lineTo is assumed.
     */
    private void curveTo(double ax, double ay, double bx, double by, double cx, double cy)
        throws IOException
    {
        updateBBox(ax, ay);
        updateBBox(bx, by);
        updateBBox(cx, cy);
        output.curveTo((float)ax, (float)ay, (float)bx, (float)by, (float)cx, (float)cy);
    }
private void finish() throws IOException
{
if (outputStarted)
{
output.closePath();
}
if (lineWidth > 0)
{
double d = lineWidth / 2;
bboxMinX -= d;
bboxMinY -= d;
bboxMaxX += d;
bboxMaxY += d;
}
}
private double getEllipseCloudRadius()
{
// Equation deduced from Acrobat Reader's appearance streams. Circle
// annotations have a slightly larger radius than Polygons and Squares.
return 4.75 * intensity + 0.5 * lineWidth;
}
private double getPolygonCloudRadius()
{
// Equation deduced from Acrobat Reader's appearance streams.
return 4 * intensity + 0.5 * lineWidth;
}
}
|
apache/qpid-broker-j | 35,735 | broker-core/src/main/java/org/apache/qpid/server/transport/AbstractAMQPConnection.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.qpid.server.transport;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.security.AccessControlContext;
import java.security.AccessControlException;
import java.security.AccessController;
import java.security.Principal;
import java.security.PrivilegedAction;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicLong;
import javax.security.auth.Subject;
import javax.security.auth.SubjectDomainCombiner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.qpid.server.bytebuffer.QpidByteBuffer;
import org.apache.qpid.server.configuration.updater.TaskExecutor;
import org.apache.qpid.server.connection.ConnectionPrincipal;
import org.apache.qpid.server.logging.EventLogger;
import org.apache.qpid.server.logging.EventLoggerProvider;
import org.apache.qpid.server.logging.Outcome;
import org.apache.qpid.server.logging.LogSubject;
import org.apache.qpid.server.logging.messages.ConnectionMessages;
import org.apache.qpid.server.logging.subjects.ConnectionLogSubject;
import org.apache.qpid.server.model.AbstractConfiguredObject;
import org.apache.qpid.server.model.Broker;
import org.apache.qpid.server.model.ConfiguredObject;
import org.apache.qpid.server.model.Connection;
import org.apache.qpid.server.model.ContextProvider;
import org.apache.qpid.server.model.NamedAddressSpace;
import org.apache.qpid.server.model.Port;
import org.apache.qpid.server.model.Protocol;
import org.apache.qpid.server.model.Session;
import org.apache.qpid.server.model.State;
import org.apache.qpid.server.model.TaskExecutorProvider;
import org.apache.qpid.server.model.Transport;
import org.apache.qpid.server.model.port.AmqpPort;
import org.apache.qpid.server.security.auth.AuthenticatedPrincipal;
import org.apache.qpid.server.security.auth.sasl.SaslSettings;
import org.apache.qpid.server.session.AbstractAMQPSession;
import org.apache.qpid.server.stats.StatisticsGatherer;
import org.apache.qpid.server.store.StoreException;
import org.apache.qpid.server.transport.network.NetworkConnection;
import org.apache.qpid.server.transport.network.Ticker;
import org.apache.qpid.server.txn.FlowToDiskTransactionObserver;
import org.apache.qpid.server.txn.LocalTransaction;
import org.apache.qpid.server.txn.ServerTransaction;
import org.apache.qpid.server.txn.TransactionObserver;
import org.apache.qpid.server.util.Action;
import org.apache.qpid.server.util.ConnectionScopedRuntimeException;
import org.apache.qpid.server.util.FixedKeyMapCreator;
import org.apache.qpid.server.util.ServerScopedRuntimeException;
import org.apache.qpid.server.virtualhost.QueueManagingVirtualHost;
public abstract class AbstractAMQPConnection<C extends AbstractAMQPConnection<C,T>, T>
extends AbstractConfiguredObject<C>
implements ProtocolEngine, AMQPConnection<C>, EventLoggerProvider, SaslSettings
{
public static final FixedKeyMapCreator PUBLISH_ACTION_MAP_CREATOR = new FixedKeyMapCreator("routingKey", "immediate");
private static final String OPEN_TRANSACTION_TIMEOUT_ERROR = "Open transaction timed out";
private static final String IDLE_TRANSACTION_TIMEOUT_ERROR = "Idle transaction timed out";
private static final Logger LOGGER = LoggerFactory.getLogger(AbstractAMQPConnection.class);
private final Broker<?> _broker;
private final ServerNetworkConnection _network;
private final AmqpPort<?> _port;
private final Transport _transport;
private final Protocol _protocol;
private final long _connectionId;
private final AggregateTicker _aggregateTicker;
private final Subject _subject = new Subject();
private final List<Action<? super C>> _connectionCloseTaskList = new CopyOnWriteArrayList<>();
private final LogSubject _logSubject;
private volatile ContextProvider _contextProvider;
private volatile EventLoggerProvider _eventLoggerProvider;
private String _clientProduct;
private String _clientVersion;
private String _remoteProcessPid;
private String _clientId;
private volatile boolean _stopped;
private final AtomicLong _messagesIn = new AtomicLong();
private final AtomicLong _messagesOut = new AtomicLong();
private final AtomicLong _transactedMessagesIn = new AtomicLong();
private final AtomicLong _transactedMessagesOut = new AtomicLong();
private final AtomicLong _bytesIn = new AtomicLong();
private final AtomicLong _bytesOut = new AtomicLong();
private final AtomicLong _localTransactionBegins = new AtomicLong();
private final AtomicLong _localTransactionRollbacks = new AtomicLong();
private final AtomicLong _localTransactionOpens = new AtomicLong();
private final CompletableFuture<Void> _transportClosedFuture = new CompletableFuture<>();
private final CompletableFuture<Void> _modelTransportRendezvousFuture = new CompletableFuture<>();
private volatile NamedAddressSpace _addressSpace;
private volatile long _lastReadTime;
private volatile long _lastWriteTime;
private volatile long _lastMessageInboundTime;
private volatile long _lastMessageOutboundTime;
private volatile boolean _messagesWritten;
private volatile AccessControlContext _accessControllerContext;
private volatile Thread _ioThread;
private volatile StatisticsGatherer _statisticsGatherer;
private volatile boolean _messageAuthorizationRequired;
private final AtomicLong _maxMessageSize = new AtomicLong(Integer.MAX_VALUE);
private volatile int _messageCompressionThreshold;
private volatile TransactionObserver _transactionObserver;
private long _maxUncommittedInMemorySize;
private final Map<ServerTransaction, Set<Ticker>> _transactionTickers = new ConcurrentHashMap<>();
    /**
     * Creates the connection model object and wires the transport-closed
     * future so that closing the network transport triggers an asynchronous
     * model close followed by a connection-close log message.
     *
     * @param broker broker that initially supplies context, statistics and
     *        event logging (replaced by the virtual host in setAddressSpace)
     * @param network the underlying network connection
     * @param port the AMQP port that accepted the connection
     * @param transport transport type used by the connection
     * @param protocol negotiated protocol version
     * @param connectionId broker-assigned id, also used in the model name
     * @param aggregateTicker ticker aggregating idle/timeout checks
     */
    public AbstractAMQPConnection(Broker<?> broker,
                                  ServerNetworkConnection network,
                                  AmqpPort<?> port,
                                  Transport transport,
                                  Protocol protocol,
                                  long connectionId,
                                  AggregateTicker aggregateTicker)
    {
        super(port, createAttributes(connectionId, network));
        _broker = broker;
        // Until a virtual host is selected, the broker provides these services.
        _eventLoggerProvider = broker;
        _contextProvider = broker;
        _statisticsGatherer = broker;
        _network = network;
        _port = port;
        _transport = transport;
        _protocol = protocol;
        _connectionId = connectionId;
        _aggregateTicker = aggregateTicker;
        // The connection itself becomes a principal of its own subject.
        _subject.getPrincipals().add(new ConnectionPrincipal(this));
        updateAccessControllerContext();
        // When the transport closes, rendezvous with the model close and log
        // the outcome on the task executor thread.
        _transportClosedFuture.thenRunAsync(() ->
                                       {
                                           _modelTransportRendezvousFuture.complete(null);
                                           closeAsync().whenCompleteAsync((result, error) -> logConnectionClose(), getTaskExecutor());
                                       }, getTaskExecutor());
        setState(State.ACTIVE);
        _logSubject = new ConnectionLogSubject(this);
    }
private static Map<String, Object> createAttributes(long connectionId, NetworkConnection network)
{
Map<String,Object> attributes = new HashMap<>();
attributes.put(NAME, "[" + connectionId + "] " + String.valueOf(network.getRemoteAddress()).replaceAll("/", ""));
attributes.put(DURABLE, false);
return attributes;
}
@Override
public final AccessControlContext getAccessControlContextFromSubject(final Subject subject)
{
final AccessControlContext acc = AccessController.getContext();
return AccessController.doPrivileged(
(PrivilegedAction<AccessControlContext>) () -> {
if (subject == null)
return new AccessControlContext(acc, null);
else
return new AccessControlContext
(acc,
new SubjectDomainCombiner(subject));
});
}
    /**
     * Completes model opening: arms the slow-open (DoS) ticker, rebases the
     * I/O timestamps to the creation time and configures the transaction
     * observer used to flow large uncommitted transactions to disk.
     */
    @Override
    protected void onOpen()
    {
        super.onOpen();
        final long maxAuthDelay = _port.getContextValue(Long.class, Port.CONNECTION_MAXIMUM_AUTHENTICATION_DELAY);
        final SlowConnectionOpenTicker slowConnectionOpenTicker = new SlowConnectionOpenTicker(maxAuthDelay);
        _aggregateTicker.addTicker(slowConnectionOpenTicker);
        // No I/O has happened yet, so all timestamps start at creation time.
        _lastReadTime = _lastWriteTime = _lastMessageInboundTime = _lastMessageOutboundTime = getCreatedTime().getTime();
        _maxUncommittedInMemorySize = getContextValue(Long.class, Connection.MAX_UNCOMMITTED_IN_MEMORY_SIZE);
        // A negative limit disables flow-to-disk observation entirely.
        _transactionObserver = _maxUncommittedInMemorySize < 0 ? FlowToDiskTransactionObserver.NOOP_TRANSACTION_OBSERVER : new FlowToDiskTransactionObserver(_maxUncommittedInMemorySize, _logSubject, _eventLoggerProvider.getEventLogger());
    }
@Override
public Broker<?> getBroker()
{
return _broker;
}
public final ServerNetworkConnection getNetwork()
{
return _network;
}
@Override
public final AmqpPort<?> getPort()
{
return _port;
}
@Override
public final Transport getTransport()
{
return _transport;
}
@Override
public String getTransportInfo()
{
return _network.getTransportInfo();
}
@Override
public Protocol getProtocol()
{
return _protocol;
}
@Override
public AggregateTicker getAggregateTicker()
{
return _aggregateTicker;
}
    // Most recent I/O activity in either direction.
    @Override
    public final Date getLastIoTime()
    {
        return new Date(Math.max(getLastReadTime(), getLastWriteTime()));
    }
    @Override
    public final long getLastReadTime()
    {
        return _lastReadTime;
    }
    // Called from received() on every inbound buffer.
    private void updateLastReadTime()
    {
        _lastReadTime = System.currentTimeMillis();
    }
    @Override
    public final long getLastWriteTime()
    {
        return _lastWriteTime;
    }
public final void updateLastWriteTime()
{
final long currentTime = System.currentTimeMillis();
_lastWriteTime = currentTime;
if(_messagesWritten)
{
_messagesWritten = false;
_lastMessageOutboundTime = currentTime;
}
}
    // Inbound message time piggybacks on the last network read time.
    @Override
    public void updateLastMessageInboundTime()
    {
        _lastMessageInboundTime = _lastReadTime;
    }
    // Only sets a flag; the actual timestamp is taken on the next write
    // (see updateLastWriteTime).
    @Override
    public void updateLastMessageOutboundTime()
    {
        _messagesWritten = true;
    }
    @Override
    public Date getLastInboundMessageTime()
    {
        return new Date(_lastMessageInboundTime);
    }
    @Override
    public Date getLastOutboundMessageTime()
    {
        return new Date(_lastMessageOutboundTime);
    }
    // Most recent message activity in either direction.
    @Override
    public Date getLastMessageTime()
    {
        return new Date(Math.max(_lastMessageInboundTime, _lastMessageOutboundTime));
    }
@Override
public final long getConnectionId()
{
return _connectionId;
}
@Override
public String getRemoteAddressString()
{
return String.valueOf(_network.getRemoteAddress());
}
@Override
public final void stopConnection()
{
_stopped = true;
}
@Override
public boolean isConnectionStopped()
{
return _stopped;
}
@Override
public final String getAddressSpaceName()
{
return getAddressSpace() == null ? null : getAddressSpace().getName();
}
@Override
public String getClientVersion()
{
return _clientVersion;
}
@Override
public String getRemoteProcessPid()
{
return _remoteProcessPid;
}
@Override
public void pushScheduler(final NetworkConnectionScheduler networkConnectionScheduler)
{
if (_network instanceof NonBlockingConnection)
{
((NonBlockingConnection) _network).pushScheduler(networkConnectionScheduler);
}
}
@Override
public NetworkConnectionScheduler popScheduler()
{
if (_network instanceof NonBlockingConnection)
{
return ((NonBlockingConnection) _network).popScheduler();
}
return null;
}
@Override
public String getClientProduct()
{
return _clientProduct;
}
protected void updateMaxMessageSize()
{
_maxMessageSize.set(Math.min(getMaxMessageSize(getPort()), getMaxMessageSize(_contextProvider)));
}
private long getMaxMessageSize(final ContextProvider object)
{
try
{
final int maxMessageSize = object.getContextValue(Integer.class, MAX_MESSAGE_SIZE);
return maxMessageSize > 0 ? maxMessageSize : Integer.MAX_VALUE;
}
catch (NullPointerException | IllegalArgumentException e)
{
LOGGER.warn("Context variable {} has invalid value and cannot be used to restrict maximum message size",
MAX_MESSAGE_SIZE,
e);
}
return Integer.MAX_VALUE;
}
@Override
public long getMaxMessageSize()
{
return _maxMessageSize.get();
}
@Override
public void addDeleteTask(final Action<? super C> task)
{
_connectionCloseTaskList.add(task);
}
@Override
public void removeDeleteTask(final Action<? super C> task)
{
_connectionCloseTaskList.remove(task);
}
    /**
     * Runs all registered connection-close tasks. If the caller is not
     * already executing as this connection's subject, the method re-enters
     * itself once via runAsSubject so tasks always see the connection
     * subject.
     */
    public void performDeleteTasks()
    {
        if(runningAsSubject())
        {
            for (Action<? super C> task : _connectionCloseTaskList)
            {
                task.performAction((C)this);
            }
        }
        else
        {
            runAsSubject(() ->
            {
                performDeleteTasks();
                return null;
            });
        }
    }
@Override
public String getClientId()
{
return _clientId;
}
@Override
public final SocketAddress getRemoteSocketAddress()
{
return _network.getRemoteAddress();
}
@Override
public void registerMessageDelivered(long messageSize)
{
_messagesOut.incrementAndGet();
_bytesOut.addAndGet(messageSize);
_statisticsGatherer.registerMessageDelivered(messageSize);
}
@Override
public void registerMessageReceived(long messageSize)
{
updateLastMessageInboundTime();
_messagesIn.incrementAndGet();
_bytesIn.addAndGet(messageSize);
_statisticsGatherer.registerMessageReceived(messageSize);
}
@Override
public void registerTransactedMessageDelivered()
{
_transactedMessagesOut.incrementAndGet();
_statisticsGatherer.registerTransactedMessageDelivered();
}
@Override
public void registerTransactedMessageReceived()
{
_transactedMessagesIn.incrementAndGet();
_statisticsGatherer.registerTransactedMessageReceived();
}
public void setClientProduct(final String clientProduct)
{
_clientProduct = clientProduct;
}
public void setClientVersion(final String clientVersion)
{
_clientVersion = clientVersion;
}
public void setRemoteProcessPid(final String remoteProcessPid)
{
_remoteProcessPid = remoteProcessPid;
}
public void setClientId(final String clientId)
{
_clientId = clientId;
}
@Override
public void setIOThread(final Thread ioThread)
{
_ioThread = ioThread;
}
@Override
public boolean isIOThread()
{
return Thread.currentThread() == _ioThread;
}
@Override
public CompletableFuture<Void> doOnIOThreadAsync(final Runnable task)
{
if (isIOThread())
{
task.run();
return CompletableFuture.completedFuture(null);
}
else
{
final CompletableFuture<Void> future = new CompletableFuture<>();
addAsyncTask(object ->
{
try
{
task.run();
future.complete(null);
}
catch (RuntimeException e)
{
future.completeExceptionally(e);
}
});
return future;
}
}
    /**
     * Entry point for inbound network bytes. Records the read time and
     * delegates to the protocol-specific onReceive, running under this
     * connection's access control context. A StoreException is escalated to
     * a server-scoped error while the address space is still active (the
     * store should be usable), otherwise to a connection-scoped error.
     */
    @Override
    public final void received(final QpidByteBuffer buf)
    {
        AccessController.doPrivileged((PrivilegedAction<Object>) () ->
        {
            updateLastReadTime();
            try
            {
                onReceive(buf);
            }
            catch (StoreException e)
            {
                if (getAddressSpace().isActive())
                {
                    throw new ServerScopedRuntimeException(e);
                }
                else
                {
                    throw new ConnectionScopedRuntimeException(e);
                }
            }
            return null;
        }, getAccessControllerContext());
    }
protected abstract void onReceive(final QpidByteBuffer msg);
protected abstract void addAsyncTask(final Action<? super T> action);
protected abstract boolean isOpeningInProgress();
protected <T> T runAsSubject(PrivilegedAction<T> action)
{
return Subject.doAs(_subject, action);
}
private boolean runningAsSubject()
{
return _subject.equals(Subject.getSubject(AccessController.getContext()));
}
@Override
public Subject getSubject()
{
return _subject;
}
@Override
public TaskExecutor getChildExecutor()
{
NamedAddressSpace addressSpace = getAddressSpace();
if (addressSpace instanceof TaskExecutorProvider)
{
return ((TaskExecutorProvider)addressSpace).getTaskExecutor();
}
else
{
return super.getChildExecutor();
}
}
@Override
public boolean isIncoming()
{
return true;
}
@Override
public String getLocalAddress()
{
return null;
}
@Override
public String getPrincipal()
{
final Principal authorizedPrincipal = getAuthorizedPrincipal();
return authorizedPrincipal == null ? null : authorizedPrincipal.getName();
}
@Override
public String getRemoteAddress()
{
return getRemoteAddressString();
}
@Override
public String getRemoteProcessName()
{
return null;
}
@Override
public Collection<Session> getSessions()
{
return getChildren(Session.class);
}
@Override
protected CompletableFuture<Void> onDelete()
{
return closeAsyncIfNotAlreadyClosing();
}
@Override
protected CompletableFuture<Void> beforeClose()
{
return closeAsyncIfNotAlreadyClosing();
}
@Override
protected CompletableFuture<Void> onClose()
{
if (_transactionObserver != null)
{
_transactionObserver.reset();
}
return CompletableFuture.completedFuture(null);
}
private CompletableFuture<Void> closeAsyncIfNotAlreadyClosing()
{
if (!_modelTransportRendezvousFuture.isDone())
{
sendConnectionCloseAsync(CloseReason.MANAGEMENT, "Connection closed by external action");
}
return _modelTransportRendezvousFuture;
}
@Override
protected <C extends ConfiguredObject> CompletableFuture<C> addChildAsync(Class<C> childClass,
Map<String, Object> attributes)
{
if (childClass == Session.class)
{
throw new IllegalStateException();
}
else
{
throw new IllegalArgumentException("Cannot create a child of class " + childClass.getSimpleName());
}
}
@Override
public long getBytesIn()
{
return _bytesIn.get();
}
@Override
public long getBytesOut()
{
return _bytesOut.get();
}
@Override
public long getMessagesIn()
{
return _messagesIn.get();
}
@Override
public long getMessagesOut()
{
return _messagesOut.get();
}
@Override
public long getTransactedMessagesIn()
{
return _transactedMessagesIn.get();
}
@Override
public long getTransactedMessagesOut()
{
return _transactedMessagesOut.get();
}
    /**
     * Resets the connection's message/byte/transaction counters and those of
     * all its sessions, and rebases the last message timestamps to now.
     */
    @Override
    public void resetStatistics()
    {
        _lastMessageInboundTime = System.currentTimeMillis();
        _lastMessageOutboundTime = System.currentTimeMillis();
        _bytesIn.set(0L);
        _bytesOut.set(0L);
        _messagesIn.set(0L);
        _messagesOut.set(0L);
        _transactedMessagesIn.set(0L);
        _transactedMessagesOut.set(0L);
        _localTransactionBegins.set(0L);
        _localTransactionRollbacks.set(0L);
        // NOTE(review): _localTransactionOpens is not reset here - presumably
        // because it is a live gauge of currently-open transactions rather
        // than a cumulative total; confirm before changing.
        getChildren(Session.class).stream()
                .filter(AbstractAMQPSession.class::isInstance).map(session -> (AbstractAMQPSession<?, ?>) session)
                .forEach(AbstractAMQPSession::resetStatistics);
    }
public AccessControlContext getAccessControllerContext()
{
return _accessControllerContext;
}
public final void updateAccessControllerContext()
{
_accessControllerContext = getAccessControlContextFromSubject(
getSubject());
}
private void logConnectionOpen()
{
runAsSubject(() ->
{
final String localAddressStr = _network.formattedLocalAddress();
getEventLogger().message(ConnectionMessages.OPEN(getPort().getName(),
localAddressStr,
getProtocol().getProtocolVersion(),
getClientId(),
getClientVersion(),
getClientProduct(),
getTransport().isSecure(),
getClientId() != null,
getClientVersion() != null,
getClientProduct() != null));
return null;
});
}
private void logConnectionClose()
{
runAsSubject((PrivilegedAction<Void>) () ->
{
String closeCause = getCloseCause();
getEventLogger().message(isOrderlyClose()
? ConnectionMessages.CLOSE(closeCause, closeCause != null)
: ConnectionMessages.DROPPED_CONNECTION());
return null;
});
}
    /**
     * Registers idle-timeout tickers and configures the transport with the
     * corresponding maximum idle periods. A non-positive delay disables the
     * respective direction.
     *
     * @param writerDelay maximum write-idle period in milliseconds
     * @param readerDelay maximum read-idle period in milliseconds
     */
    protected void initialiseHeartbeating(final long writerDelay, final long readerDelay)
    {
        if (writerDelay > 0)
        {
            _aggregateTicker.addTicker(new ServerIdleWriteTimeoutTicker(this, (int) writerDelay));
            _network.setMaxWriteIdleMillis(writerDelay);
        }
        if (readerDelay > 0)
        {
            _aggregateTicker.addTicker(new ServerIdleReadTimeoutTicker(_network, this, (int) readerDelay));
            _network.setMaxReadIdleMillis(readerDelay);
        }
    }
protected abstract boolean isOrderlyClose();
protected abstract String getCloseCause();
@Override
public int getSessionCount()
{
return getSessionModels().size();
}
protected void markTransportClosed()
{
_transportClosedFuture.complete(null);
}
public LogSubject getLogSubject()
{
return _logSubject;
}
@Override
public EventLogger getEventLogger()
{
return _eventLoggerProvider.getEventLogger();
}
@Override
public final void checkAuthorizedMessagePrincipal(final String userId)
{
if(!(userId == null
|| "".equals(userId.trim())
|| !_messageAuthorizationRequired
|| getAuthorizedPrincipal().getName().equals(userId)))
{
throw new AccessControlException("The user id of the message '"
+ userId
+ "' is not valid on a connection authenticated as "
+ getAuthorizedPrincipal().getName());
}
}
@Override
public NamedAddressSpace getAddressSpace()
{
return _addressSpace;
}
public ContextProvider getContextProvider()
{
return _contextProvider;
}
    /**
     * Associates the connection with its virtual host / address space. The
     * address space takes over event logging, context and statistics duties
     * where it supports them, message limits are recomputed from the new
     * context, the address space principal is added to the subject, and the
     * connection-open event is logged.
     */
    public void setAddressSpace(NamedAddressSpace addressSpace)
    {
        _addressSpace = addressSpace;
        if(addressSpace instanceof EventLoggerProvider)
        {
            _eventLoggerProvider = (EventLoggerProvider)addressSpace;
        }
        if(addressSpace instanceof ContextProvider)
        {
            _contextProvider = (ContextProvider) addressSpace;
        }
        if(addressSpace instanceof StatisticsGatherer)
        {
            _statisticsGatherer = (StatisticsGatherer) addressSpace;
        }
        updateMaxMessageSize();
        _messageAuthorizationRequired = _contextProvider.getContextValue(Boolean.class, Broker.BROKER_MSG_AUTH);
        _messageCompressionThreshold = _contextProvider.getContextValue(Integer.class,
                                                                        Broker.MESSAGE_COMPRESSION_THRESHOLD_SIZE);
        // A non-positive threshold means compression is effectively disabled.
        if(_messageCompressionThreshold <= 0)
        {
            _messageCompressionThreshold = Integer.MAX_VALUE;
        }
        getSubject().getPrincipals().add(addressSpace.getPrincipal());
        updateAccessControllerContext();
        logConnectionOpen();
    }
@Override
public int getMessageCompressionThreshold()
{
return _messageCompressionThreshold;
}
@Override
public long getMaxUncommittedInMemorySize()
{
return _maxUncommittedInMemorySize;
}
@Override
public String toString()
{
return getNetwork().getRemoteAddress() + "(" + ((getAuthorizedPrincipal() == null ? "?" : getAuthorizedPrincipal().getName()) + ")");
}
@Override
public Principal getAuthorizedPrincipal()
{
return AuthenticatedPrincipal.getOptionalAuthenticatedPrincipalFromSubject(getSubject());
}
public void setSubject(final Subject subject)
{
if (subject == null)
{
throw new IllegalArgumentException("subject cannot be null");
}
getSubject().getPrincipals().addAll(subject.getPrincipals());
getSubject().getPrivateCredentials().addAll(subject.getPrivateCredentials());
getSubject().getPublicCredentials().addAll(subject.getPublicCredentials());
updateAccessControllerContext();
}
@Override
public LocalTransaction createLocalTransaction()
{
_localTransactionBegins.incrementAndGet();
_localTransactionOpens.incrementAndGet();
return new LocalTransaction(getAddressSpace().getMessageStore(),
() -> getLastReadTime(),
_transactionObserver,
getProtocol() != Protocol.AMQP_1_0);
}
    /**
     * Registers timeout tickers for a transaction so that long-open or idle
     * store transactions are warned about via the event log or force-closed.
     * Only effective when the address space is a QueueManagingVirtualHost;
     * each of the four thresholds is registered only when its configured
     * value is positive.
     *
     * @param serverTransaction the transaction to watch
     * @param closeAction invoked with an error message to close the connection
     * @param notificationRepeatPeriod minimum period between repeated notifications
     */
    @Override
    public void registerTransactionTickers(final ServerTransaction serverTransaction,
                                           final Action<String> closeAction, final long notificationRepeatPeriod)
    {
        NamedAddressSpace addressSpace = getAddressSpace();
        if (addressSpace instanceof QueueManagingVirtualHost)
        {
            final QueueManagingVirtualHost<?> virtualhost = (QueueManagingVirtualHost<?>) addressSpace;
            EventLogger eventLogger = virtualhost.getEventLogger();
            final Set<Ticker> tickers = new LinkedHashSet<>(4);
            // Warn when the transaction has been open too long.
            if (virtualhost.getStoreTransactionOpenTimeoutWarn() > 0)
            {
                tickers.add(new TransactionTimeoutTicker(
                        virtualhost.getStoreTransactionOpenTimeoutWarn(),
                        notificationRepeatPeriod, serverTransaction::getTransactionStartTime,
                        age -> eventLogger.message(getLogSubject(), ConnectionMessages.OPEN_TXN(age))
                ));
            }
            // Close the connection when the transaction has been open too long.
            if (virtualhost.getStoreTransactionOpenTimeoutClose() > 0)
            {
                tickers.add(new TransactionTimeoutTicker(
                        virtualhost.getStoreTransactionOpenTimeoutClose(),
                        notificationRepeatPeriod, serverTransaction::getTransactionStartTime,
                        age -> closeAction.performAction(OPEN_TRANSACTION_TIMEOUT_ERROR)));
            }
            // Warn when the transaction has seen no updates for too long.
            if (virtualhost.getStoreTransactionIdleTimeoutWarn() > 0)
            {
                tickers.add(new TransactionTimeoutTicker(
                        virtualhost.getStoreTransactionIdleTimeoutWarn(),
                        notificationRepeatPeriod, serverTransaction::getTransactionUpdateTime,
                        age -> eventLogger.message(getLogSubject(), ConnectionMessages.IDLE_TXN(age))
                ));
            }
            // Close the connection when the transaction is idle too long.
            if (virtualhost.getStoreTransactionIdleTimeoutClose() > 0)
            {
                tickers.add(new TransactionTimeoutTicker(
                        virtualhost.getStoreTransactionIdleTimeoutClose(),
                        notificationRepeatPeriod, serverTransaction::getTransactionUpdateTime,
                        age -> closeAction.performAction(IDLE_TRANSACTION_TIMEOUT_ERROR)
                ));
            }
            if (!tickers.isEmpty())
            {
                for (Ticker ticker : tickers)
                {
                    getAggregateTicker().addTicker(ticker);
                }
                // Wake the IO loop so the new tickers are scheduled promptly.
                notifyWork();
            }
            // The (possibly empty) set is remembered for unregistration.
            _transactionTickers.put(serverTransaction, tickers);
        }
    }
@Override
public void unregisterTransactionTickers(final ServerTransaction serverTransaction)
{
NamedAddressSpace addressSpace = getAddressSpace();
if (addressSpace instanceof QueueManagingVirtualHost)
{
_transactionTickers.remove(serverTransaction).forEach(t -> getAggregateTicker().removeTicker(t));
}
}
    /**
     * Ticker that force-closes the network connection if protocol
     * establishment takes longer than the allowed time (a possible DoS).
     * Scheduling delays reported by the network layer extend the deadline
     * so a busy broker does not penalise a well-behaved client.
     */
    private class SlowConnectionOpenTicker implements Ticker, SchedulingDelayNotificationListener
    {
        private final long _allowedTime;
        private volatile long _accumulatedSchedulingDelay;
        SlowConnectionOpenTicker(long timeoutTime)
        {
            _allowedTime = timeoutTime;
        }
        @Override
        public int getTimeToNextTick(final long currentTime)
        {
            // Deadline = creation time + allowance + any scheduling delay.
            return (int) (getCreatedTime().getTime() + _allowedTime + _accumulatedSchedulingDelay - currentTime);
        }
        @Override
        public int tick(final long currentTime)
        {
            int nextTick = getTimeToNextTick(currentTime);
            if(nextTick <= 0)
            {
                if (isOpeningInProgress())
                {
                    // Still not established after the deadline: close.
                    LOGGER.warn("Connection has taken more than {} ms to establish.  Closing as possible DoS.",
                                 _allowedTime);
                    getEventLogger().message(ConnectionMessages.IDLE_CLOSE(
                            "Protocol connection is not established within timeout period", true));
                    _network.close();
                }
                else
                {
                    // Opening completed in time: this ticker is no longer needed.
                    _aggregateTicker.removeTicker(this);
                    _network.removeSchedulingDelayNotificationListeners(this);
                }
            }
            return nextTick;
        }
        @Override
        public void notifySchedulingDelay(final long schedulingDelay)
        {
            if (schedulingDelay > 0)
            {
                _accumulatedSchedulingDelay += schedulingDelay;
            }
        }
    }
@Override
protected void logOperation(final String operation)
{
getEventLogger().message(ConnectionMessages.OPERATION(operation));
}
@Override
public String getLocalFQDN()
{
SocketAddress address = getNetwork().getLocalAddress();
if (address instanceof InetSocketAddress)
{
return ((InetSocketAddress) address).getHostName();
}
else
{
throw new IllegalArgumentException("Unsupported socket address class: " + address);
}
}
@Override
public Principal getExternalPrincipal()
{
return getNetwork().getPeerPrincipal();
}
@Override
public Date getOldestTransactionStartTime()
{
long oldest = Long.MAX_VALUE;
Iterator<ServerTransaction> iterator = getOpenTransactions();
while (iterator.hasNext())
{
final ServerTransaction value = iterator.next();
if (value instanceof LocalTransaction)
{
long transactionStartTimeLong = value.getTransactionStartTime();
if (transactionStartTimeLong > 0 && oldest > transactionStartTimeLong)
{
oldest = transactionStartTimeLong;
}
}
}
return oldest == Long.MAX_VALUE ? null : new Date(oldest);
}
@Override
public long getLocalTransactionBegins()
{
return _localTransactionBegins.get();
}
@Override
public long getLocalTransactionOpen()
{
return _localTransactionOpens.get();
}
@Override
public long getLocalTransactionRollbacks()
{
return _localTransactionRollbacks.get();
}
@Override
public void incrementTransactionRollbackCounter()
{
_localTransactionRollbacks.incrementAndGet();
}
@Override
public void decrementTransactionOpenCounter()
{
_localTransactionOpens.decrementAndGet();
}
@Override
public void incrementTransactionOpenCounter()
{
_localTransactionOpens.incrementAndGet();
}
@Override
public void incrementTransactionBeginCounter()
{
_localTransactionBegins.incrementAndGet();
}
@Override
protected void logCreated(final Map<String, Object> attributes,
final Outcome outcome)
{
logConnectionOpen();
}
@Override
protected void logDeleted(final Outcome outcome)
{
getEventLogger().message(_logSubject, ConnectionMessages.MODEL_DELETE());
}
}
|
google/j2objc | 35,862 | jre_emul/android/platform/libcore/jsr166-tests/src/test/java/jsr166/LinkedTransferQueueTest.java | /*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/publicdomain/zero/1.0/
* Other contributors include John Vint
*/
package jsr166;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executors;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedTransferQueue;
import junit.framework.Test;
@SuppressWarnings({"unchecked", "rawtypes"})
public class LinkedTransferQueueTest extends JSR166TestCase {
static class Implementation implements CollectionImplementation {
public Class<?> klazz() { return LinkedTransferQueue.class; }
public Collection emptyCollection() { return new LinkedTransferQueue(); }
public Object makeElement(int i) { return i; }
public boolean isConcurrent() { return true; }
public boolean permitsNulls() { return false; }
}
// android-note: These tests have been moved into their own separate
// classes to work around CTS issues:
// LinkedTransferQueueBlockingQueueTest.java
// LinkedTransferQueueCollectionTest.java
//
// public static class Generic extends BlockingQueueTest {
// protected BlockingQueue emptyCollection() {
// return new LinkedTransferQueue();
// }
// }
// android-note: Removed because the CTS runner does a bad job of
// retrying tests that have suite() declarations.
//
// public static void main(String[] args) {
// main(suite(), args);
// }
// public static Test suite() {
// return newTestSuite(LinkedTransferQueueTest.class,
// new Generic().testSuite(),
// CollectionTest.testSuite(new Implementation()));
// }
/**
* Constructor builds new queue with size being zero and empty
* being true
*/
public void testConstructor1() {
assertEquals(0, new LinkedTransferQueue().size());
assertTrue(new LinkedTransferQueue().isEmpty());
}
/**
* Initializing constructor with null collection throws
* NullPointerException
*/
public void testConstructor2() {
try {
new LinkedTransferQueue(null);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
* Initializing from Collection of null elements throws
* NullPointerException
*/
public void testConstructor3() {
Collection<Integer> elements = Arrays.asList(new Integer[SIZE]);
try {
new LinkedTransferQueue(elements);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
* Initializing constructor with a collection containing some null elements
* throws NullPointerException
*/
public void testConstructor4() {
Integer[] ints = new Integer[SIZE];
for (int i = 0; i < SIZE - 1; ++i)
ints[i] = i;
Collection<Integer> elements = Arrays.asList(ints);
try {
new LinkedTransferQueue(elements);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
* Queue contains all elements of the collection it is initialized by
*/
public void testConstructor5() {
Integer[] ints = new Integer[SIZE];
for (int i = 0; i < SIZE; ++i) {
ints[i] = i;
}
List intList = Arrays.asList(ints);
LinkedTransferQueue q
= new LinkedTransferQueue(intList);
assertEquals(q.size(), intList.size());
assertEquals(q.toString(), intList.toString());
assertTrue(Arrays.equals(q.toArray(),
intList.toArray()));
assertTrue(Arrays.equals(q.toArray(new Object[0]),
intList.toArray(new Object[0])));
assertTrue(Arrays.equals(q.toArray(new Object[SIZE]),
intList.toArray(new Object[SIZE])));
for (int i = 0; i < SIZE; ++i) {
assertEquals(ints[i], q.poll());
}
}
/**
* remainingCapacity() always returns Integer.MAX_VALUE
*/
public void testRemainingCapacity() {
BlockingQueue q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertEquals(Integer.MAX_VALUE, q.remainingCapacity());
assertEquals(SIZE - i, q.size());
assertEquals(i, q.remove());
}
for (int i = 0; i < SIZE; ++i) {
assertEquals(Integer.MAX_VALUE, q.remainingCapacity());
assertEquals(i, q.size());
assertTrue(q.add(i));
}
}
/**
* addAll(this) throws IllegalArgumentException
*/
public void testAddAllSelf() {
LinkedTransferQueue q = populatedQueue(SIZE);
try {
q.addAll(q);
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
* addAll of a collection with any null elements throws
* NullPointerException after possibly adding some elements
*/
public void testAddAll3() {
LinkedTransferQueue q = new LinkedTransferQueue();
Integer[] ints = new Integer[SIZE];
for (int i = 0; i < SIZE - 1; ++i)
ints[i] = i;
try {
q.addAll(Arrays.asList(ints));
shouldThrow();
} catch (NullPointerException success) {}
}
/**
* Queue contains all elements, in traversal order, of successful addAll
*/
public void testAddAll5() {
Integer[] empty = new Integer[0];
Integer[] ints = new Integer[SIZE];
for (int i = 0; i < SIZE; ++i) {
ints[i] = i;
}
LinkedTransferQueue q = new LinkedTransferQueue();
assertFalse(q.addAll(Arrays.asList(empty)));
assertTrue(q.addAll(Arrays.asList(ints)));
for (int i = 0; i < SIZE; ++i) {
assertEquals(ints[i], q.poll());
}
}
/**
* all elements successfully put are contained
*/
public void testPut() {
LinkedTransferQueue<Integer> q = new LinkedTransferQueue<Integer>();
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, q.size());
q.put(i);
assertTrue(q.contains(i));
}
}
/**
* take retrieves elements in FIFO order
*/
public void testTake() throws InterruptedException {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, (int) q.take());
}
}
/**
* take removes existing elements until empty, then blocks interruptibly
*/
public void testBlockingTake() throws InterruptedException {
final BlockingQueue q = populatedQueue(SIZE);
final CountDownLatch pleaseInterrupt = new CountDownLatch(1);
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, q.take());
}
Thread.currentThread().interrupt();
try {
q.take();
shouldThrow();
} catch (InterruptedException success) {}
assertFalse(Thread.interrupted());
pleaseInterrupt.countDown();
try {
q.take();
shouldThrow();
} catch (InterruptedException success) {}
assertFalse(Thread.interrupted());
}});
await(pleaseInterrupt);
assertThreadStaysAlive(t);
t.interrupt();
awaitTermination(t);
}
/**
* poll succeeds unless empty
*/
public void testPoll() throws InterruptedException {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, (int) q.poll());
}
assertNull(q.poll());
checkEmpty(q);
}
/**
* timed poll with zero timeout succeeds when non-empty, else times out
*/
public void testTimedPoll0() throws InterruptedException {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, (int) q.poll(0, MILLISECONDS));
}
assertNull(q.poll(0, MILLISECONDS));
checkEmpty(q);
}
/**
* timed poll with nonzero timeout succeeds when non-empty, else times out
*/
public void testTimedPoll() throws InterruptedException {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
long startTime = System.nanoTime();
for (int i = 0; i < SIZE; ++i)
assertEquals(i, (int) q.poll(LONG_DELAY_MS, MILLISECONDS));
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
startTime = System.nanoTime();
assertNull(q.poll(timeoutMillis(), MILLISECONDS));
assertTrue(millisElapsedSince(startTime) >= timeoutMillis());
checkEmpty(q);
}
/**
* Interrupted timed poll throws InterruptedException instead of
* returning timeout status
*/
public void testInterruptedTimedPoll() throws InterruptedException {
final BlockingQueue<Integer> q = populatedQueue(SIZE);
final CountDownLatch aboutToWait = new CountDownLatch(1);
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
long startTime = System.nanoTime();
for (int i = 0; i < SIZE; ++i)
assertEquals(i, (int) q.poll(LONG_DELAY_MS, MILLISECONDS));
aboutToWait.countDown();
try {
q.poll(LONG_DELAY_MS, MILLISECONDS);
shouldThrow();
} catch (InterruptedException success) {}
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
}});
aboutToWait.await();
waitForThreadToEnterWaitState(t);
t.interrupt();
awaitTermination(t);
checkEmpty(q);
}
/**
* timed poll after thread interrupted throws InterruptedException
* instead of returning timeout status
*/
public void testTimedPollAfterInterrupt() throws InterruptedException {
final BlockingQueue<Integer> q = populatedQueue(SIZE);
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
long startTime = System.nanoTime();
Thread.currentThread().interrupt();
for (int i = 0; i < SIZE; ++i)
assertEquals(i, (int) q.poll(LONG_DELAY_MS, MILLISECONDS));
try {
q.poll(LONG_DELAY_MS, MILLISECONDS);
shouldThrow();
} catch (InterruptedException success) {}
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
}});
awaitTermination(t);
checkEmpty(q);
}
/**
* peek returns next element, or null if empty
*/
public void testPeek() throws InterruptedException {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, (int) q.peek());
assertEquals(i, (int) q.poll());
assertTrue(q.peek() == null ||
i != (int) q.peek());
}
assertNull(q.peek());
checkEmpty(q);
}
/**
* element returns next element, or throws NoSuchElementException if empty
*/
public void testElement() throws InterruptedException {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, (int) q.element());
assertEquals(i, (int) q.poll());
}
try {
q.element();
shouldThrow();
} catch (NoSuchElementException success) {}
checkEmpty(q);
}
/**
* remove removes next element, or throws NoSuchElementException if empty
*/
public void testRemove() throws InterruptedException {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, (int) q.remove());
}
try {
q.remove();
shouldThrow();
} catch (NoSuchElementException success) {}
checkEmpty(q);
}
/**
* An add following remove(x) succeeds
*/
public void testRemoveElementAndAdd() throws InterruptedException {
LinkedTransferQueue q = new LinkedTransferQueue();
assertTrue(q.add(one));
assertTrue(q.add(two));
assertTrue(q.remove(one));
assertTrue(q.remove(two));
assertTrue(q.add(three));
assertSame(q.take(), three);
}
/**
* contains(x) reports true when elements added but not yet removed
*/
public void testContains() {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
assertTrue(q.contains(i));
assertEquals(i, (int) q.poll());
assertFalse(q.contains(i));
}
}
/**
* clear removes all elements
*/
public void testClear() throws InterruptedException {
LinkedTransferQueue q = populatedQueue(SIZE);
q.clear();
checkEmpty(q);
assertEquals(Integer.MAX_VALUE, q.remainingCapacity());
q.add(one);
assertFalse(q.isEmpty());
assertEquals(1, q.size());
assertTrue(q.contains(one));
q.clear();
checkEmpty(q);
}
/**
* containsAll(c) is true when c contains a subset of elements
*/
public void testContainsAll() {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
LinkedTransferQueue<Integer> p = new LinkedTransferQueue<Integer>();
for (int i = 0; i < SIZE; ++i) {
assertTrue(q.containsAll(p));
assertFalse(p.containsAll(q));
p.add(i);
}
assertTrue(p.containsAll(q));
}
/**
* retainAll(c) retains only those elements of c and reports true
* if changed
*/
public void testRetainAll() {
LinkedTransferQueue q = populatedQueue(SIZE);
LinkedTransferQueue p = populatedQueue(SIZE);
for (int i = 0; i < SIZE; ++i) {
boolean changed = q.retainAll(p);
if (i == 0) {
assertFalse(changed);
} else {
assertTrue(changed);
}
assertTrue(q.containsAll(p));
assertEquals(SIZE - i, q.size());
p.remove();
}
}
/**
* removeAll(c) removes only those elements of c and reports true
* if changed
*/
public void testRemoveAll() {
for (int i = 1; i < SIZE; ++i) {
LinkedTransferQueue q = populatedQueue(SIZE);
LinkedTransferQueue p = populatedQueue(i);
assertTrue(q.removeAll(p));
assertEquals(SIZE - i, q.size());
for (int j = 0; j < i; ++j) {
assertFalse(q.contains(p.remove()));
}
}
}
/**
* toArray() contains all elements in FIFO order
*/
public void testToArray() {
LinkedTransferQueue q = populatedQueue(SIZE);
Object[] o = q.toArray();
for (int i = 0; i < o.length; i++) {
assertSame(o[i], q.poll());
}
}
/**
* toArray(a) contains all elements in FIFO order
*/
public void testToArray2() {
LinkedTransferQueue<Integer> q = populatedQueue(SIZE);
Integer[] ints = new Integer[SIZE];
Integer[] array = q.toArray(ints);
assertSame(ints, array);
for (int i = 0; i < ints.length; i++) {
assertSame(ints[i], q.poll());
}
}
/**
* toArray(incompatible array type) throws ArrayStoreException
*/
public void testToArray1_BadArg() {
LinkedTransferQueue q = populatedQueue(SIZE);
try {
q.toArray(new String[10]);
shouldThrow();
} catch (ArrayStoreException success) {}
}
/**
* iterator iterates through all elements
*/
public void testIterator() throws InterruptedException {
LinkedTransferQueue q = populatedQueue(SIZE);
Iterator it = q.iterator();
int i;
for (i = 0; it.hasNext(); i++)
assertTrue(q.contains(it.next()));
assertEquals(i, SIZE);
assertIteratorExhausted(it);
it = q.iterator();
for (i = 0; it.hasNext(); i++)
assertEquals(it.next(), q.take());
assertEquals(i, SIZE);
assertIteratorExhausted(it);
}
/**
* iterator of empty collection has no elements
*/
public void testEmptyIterator() {
assertIteratorExhausted(new LinkedTransferQueue().iterator());
}
/**
* iterator.remove() removes current element
*/
public void testIteratorRemove() {
final LinkedTransferQueue q = new LinkedTransferQueue();
q.add(two);
q.add(one);
q.add(three);
Iterator it = q.iterator();
it.next();
it.remove();
it = q.iterator();
assertSame(it.next(), one);
assertSame(it.next(), three);
assertFalse(it.hasNext());
}
/**
* iterator ordering is FIFO
*/
public void testIteratorOrdering() {
final LinkedTransferQueue<Integer> q
= new LinkedTransferQueue<Integer>();
assertEquals(Integer.MAX_VALUE, q.remainingCapacity());
q.add(one);
q.add(two);
q.add(three);
assertEquals(Integer.MAX_VALUE, q.remainingCapacity());
int k = 0;
for (Integer n : q) {
assertEquals(++k, (int) n);
}
assertEquals(3, k);
}
/**
* Modifications do not cause iterators to fail
*/
public void testWeaklyConsistentIteration() {
final LinkedTransferQueue q = new LinkedTransferQueue();
q.add(one);
q.add(two);
q.add(three);
for (Iterator it = q.iterator(); it.hasNext();) {
q.remove();
it.next();
}
assertEquals(0, q.size());
}
/**
* toString contains toStrings of elements
*/
public void testToString() {
LinkedTransferQueue q = populatedQueue(SIZE);
String s = q.toString();
for (int i = 0; i < SIZE; ++i) {
assertTrue(s.contains(String.valueOf(i)));
}
}
/**
* offer transfers elements across Executor tasks
*/
public void testOfferInExecutor() {
final LinkedTransferQueue q = new LinkedTransferQueue();
final CheckedBarrier threadsStarted = new CheckedBarrier(2);
final ExecutorService executor = Executors.newFixedThreadPool(2);
try (PoolCleaner cleaner = cleaner(executor)) {
executor.execute(new CheckedRunnable() {
public void realRun() throws InterruptedException {
threadsStarted.await();
long startTime = System.nanoTime();
assertTrue(q.offer(one, LONG_DELAY_MS, MILLISECONDS));
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
}});
executor.execute(new CheckedRunnable() {
public void realRun() throws InterruptedException {
threadsStarted.await();
assertSame(one, q.take());
checkEmpty(q);
}});
}
}
/**
* timed poll retrieves elements across Executor threads
*/
public void testPollInExecutor() {
final LinkedTransferQueue q = new LinkedTransferQueue();
final CheckedBarrier threadsStarted = new CheckedBarrier(2);
final ExecutorService executor = Executors.newFixedThreadPool(2);
try (PoolCleaner cleaner = cleaner(executor)) {
executor.execute(new CheckedRunnable() {
public void realRun() throws InterruptedException {
assertNull(q.poll());
threadsStarted.await();
long startTime = System.nanoTime();
assertSame(one, q.poll(LONG_DELAY_MS, MILLISECONDS));
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
checkEmpty(q);
}});
executor.execute(new CheckedRunnable() {
public void realRun() throws InterruptedException {
threadsStarted.await();
q.put(one);
}});
}
}
/**
* A deserialized serialized queue has same elements in same order
*/
public void testSerialization() throws Exception {
Queue x = populatedQueue(SIZE);
Queue y = serialClone(x);
assertNotSame(y, x);
assertEquals(x.size(), y.size());
assertEquals(x.toString(), y.toString());
assertTrue(Arrays.equals(x.toArray(), y.toArray()));
while (!x.isEmpty()) {
assertFalse(y.isEmpty());
assertEquals(x.remove(), y.remove());
}
assertTrue(y.isEmpty());
}
/**
* drainTo(c) empties queue into another collection c
*/
public void testDrainTo() {
LinkedTransferQueue q = populatedQueue(SIZE);
ArrayList l = new ArrayList();
q.drainTo(l);
assertEquals(0, q.size());
assertEquals(SIZE, l.size());
for (int i = 0; i < SIZE; ++i) {
assertEquals(i, l.get(i));
}
q.add(zero);
q.add(one);
assertFalse(q.isEmpty());
assertTrue(q.contains(zero));
assertTrue(q.contains(one));
l.clear();
q.drainTo(l);
assertEquals(0, q.size());
assertEquals(2, l.size());
for (int i = 0; i < 2; ++i) {
assertEquals(i, l.get(i));
}
}
/**
* drainTo(c) empties full queue, unblocking a waiting put.
*/
public void testDrainToWithActivePut() throws InterruptedException {
final LinkedTransferQueue q = populatedQueue(SIZE);
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() {
q.put(SIZE + 1);
}});
ArrayList l = new ArrayList();
q.drainTo(l);
assertTrue(l.size() >= SIZE);
for (int i = 0; i < SIZE; ++i)
assertEquals(i, l.get(i));
awaitTermination(t);
assertTrue(q.size() + l.size() >= SIZE);
}
/**
* drainTo(c, n) empties first min(n, size) elements of queue into c
*/
public void testDrainToN() {
LinkedTransferQueue q = new LinkedTransferQueue();
for (int i = 0; i < SIZE + 2; ++i) {
for (int j = 0; j < SIZE; j++) {
assertTrue(q.offer(j));
}
ArrayList l = new ArrayList();
q.drainTo(l, i);
int k = (i < SIZE) ? i : SIZE;
assertEquals(k, l.size());
assertEquals(SIZE - k, q.size());
for (int j = 0; j < k; ++j)
assertEquals(j, l.get(j));
do {} while (q.poll() != null);
}
}
/**
* timed poll() or take() increments the waiting consumer count;
* offer(e) decrements the waiting consumer count
*/
public void testWaitingConsumer() throws InterruptedException {
final LinkedTransferQueue q = new LinkedTransferQueue();
assertEquals(0, q.getWaitingConsumerCount());
assertFalse(q.hasWaitingConsumer());
final CountDownLatch threadStarted = new CountDownLatch(1);
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
threadStarted.countDown();
long startTime = System.nanoTime();
assertSame(one, q.poll(LONG_DELAY_MS, MILLISECONDS));
assertEquals(0, q.getWaitingConsumerCount());
assertFalse(q.hasWaitingConsumer());
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
}});
threadStarted.await();
Callable<Boolean> oneConsumer
= new Callable<Boolean>() { public Boolean call() {
return q.hasWaitingConsumer()
&& q.getWaitingConsumerCount() == 1; }};
waitForThreadToEnterWaitState(t, oneConsumer);
assertTrue(q.offer(one));
assertEquals(0, q.getWaitingConsumerCount());
assertFalse(q.hasWaitingConsumer());
awaitTermination(t);
}
/**
* transfer(null) throws NullPointerException
*/
public void testTransfer1() throws InterruptedException {
try {
LinkedTransferQueue q = new LinkedTransferQueue();
q.transfer(null);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
* transfer waits until a poll occurs. The transfered element
* is returned by this associated poll.
*/
public void testTransfer2() throws InterruptedException {
final LinkedTransferQueue<Integer> q
= new LinkedTransferQueue<Integer>();
final CountDownLatch threadStarted = new CountDownLatch(1);
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
threadStarted.countDown();
q.transfer(five);
checkEmpty(q);
}});
threadStarted.await();
Callable<Boolean> oneElement
= new Callable<Boolean>() { public Boolean call() {
return !q.isEmpty() && q.size() == 1; }};
waitForThreadToEnterWaitState(t, oneElement);
assertSame(five, q.poll());
checkEmpty(q);
awaitTermination(t);
}
/**
* transfer waits until a poll occurs, and then transfers in fifo order
*/
public void testTransfer3() throws InterruptedException {
final LinkedTransferQueue<Integer> q
= new LinkedTransferQueue<Integer>();
Thread first = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
q.transfer(four);
assertTrue(!q.contains(four));
assertEquals(1, q.size());
}});
Thread interruptedThread = newStartedThread(
new CheckedInterruptedRunnable() {
public void realRun() throws InterruptedException {
while (q.isEmpty())
Thread.yield();
q.transfer(five);
}});
while (q.size() < 2)
Thread.yield();
assertEquals(2, q.size());
assertSame(four, q.poll());
first.join();
assertEquals(1, q.size());
interruptedThread.interrupt();
interruptedThread.join();
checkEmpty(q);
}
/**
* transfer waits until a poll occurs, at which point the polling
* thread returns the element
*/
public void testTransfer4() throws InterruptedException {
final LinkedTransferQueue q = new LinkedTransferQueue();
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
q.transfer(four);
assertFalse(q.contains(four));
assertSame(three, q.poll());
}});
while (q.isEmpty())
Thread.yield();
assertFalse(q.isEmpty());
assertEquals(1, q.size());
assertTrue(q.offer(three));
assertSame(four, q.poll());
awaitTermination(t);
}
/**
* transfer waits until a take occurs. The transfered element
* is returned by this associated take.
*/
public void testTransfer5() throws InterruptedException {
final LinkedTransferQueue<Integer> q
= new LinkedTransferQueue<Integer>();
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
q.transfer(four);
checkEmpty(q);
}});
while (q.isEmpty())
Thread.yield();
assertFalse(q.isEmpty());
assertEquals(1, q.size());
assertSame(four, q.take());
checkEmpty(q);
awaitTermination(t);
}
/**
* tryTransfer(null) throws NullPointerException
*/
public void testTryTransfer1() {
final LinkedTransferQueue q = new LinkedTransferQueue();
try {
q.tryTransfer(null);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
* tryTransfer returns false and does not enqueue if there are no
* consumers waiting to poll or take.
*/
public void testTryTransfer2() throws InterruptedException {
final LinkedTransferQueue q = new LinkedTransferQueue();
assertFalse(q.tryTransfer(new Object()));
assertFalse(q.hasWaitingConsumer());
checkEmpty(q);
}
/**
* If there is a consumer waiting in timed poll, tryTransfer
* returns true while successfully transfering object.
*/
public void testTryTransfer3() throws InterruptedException {
final Object hotPotato = new Object();
final LinkedTransferQueue q = new LinkedTransferQueue();
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() {
while (! q.hasWaitingConsumer())
Thread.yield();
assertTrue(q.hasWaitingConsumer());
checkEmpty(q);
assertTrue(q.tryTransfer(hotPotato));
}});
long startTime = System.nanoTime();
assertSame(hotPotato, q.poll(LONG_DELAY_MS, MILLISECONDS));
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
checkEmpty(q);
awaitTermination(t);
}
/**
* If there is a consumer waiting in take, tryTransfer returns
* true while successfully transfering object.
*/
public void testTryTransfer4() throws InterruptedException {
final Object hotPotato = new Object();
final LinkedTransferQueue q = new LinkedTransferQueue();
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() {
while (! q.hasWaitingConsumer())
Thread.yield();
assertTrue(q.hasWaitingConsumer());
checkEmpty(q);
assertTrue(q.tryTransfer(hotPotato));
}});
assertSame(q.take(), hotPotato);
checkEmpty(q);
awaitTermination(t);
}
/**
* tryTransfer blocks interruptibly if no takers
*/
public void testTryTransfer5() throws InterruptedException {
final LinkedTransferQueue q = new LinkedTransferQueue();
final CountDownLatch pleaseInterrupt = new CountDownLatch(1);
assertTrue(q.isEmpty());
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
long startTime = System.nanoTime();
Thread.currentThread().interrupt();
try {
q.tryTransfer(new Object(), LONG_DELAY_MS, MILLISECONDS);
shouldThrow();
} catch (InterruptedException success) {}
assertFalse(Thread.interrupted());
pleaseInterrupt.countDown();
try {
q.tryTransfer(new Object(), LONG_DELAY_MS, MILLISECONDS);
shouldThrow();
} catch (InterruptedException success) {}
assertFalse(Thread.interrupted());
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
}});
await(pleaseInterrupt);
assertThreadStaysAlive(t);
t.interrupt();
awaitTermination(t);
checkEmpty(q);
}
/**
* tryTransfer gives up after the timeout and returns false
*/
public void testTryTransfer6() throws InterruptedException {
final LinkedTransferQueue q = new LinkedTransferQueue();
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
long startTime = System.nanoTime();
assertFalse(q.tryTransfer(new Object(),
timeoutMillis(), MILLISECONDS));
assertTrue(millisElapsedSince(startTime) >= timeoutMillis());
checkEmpty(q);
}});
awaitTermination(t);
checkEmpty(q);
}
/**
* tryTransfer waits for any elements previously in to be removed
* before transfering to a poll or take
*/
public void testTryTransfer7() throws InterruptedException {
final LinkedTransferQueue q = new LinkedTransferQueue();
assertTrue(q.offer(four));
Thread t = newStartedThread(new CheckedRunnable() {
public void realRun() throws InterruptedException {
long startTime = System.nanoTime();
assertTrue(q.tryTransfer(five, LONG_DELAY_MS, MILLISECONDS));
assertTrue(millisElapsedSince(startTime) < LONG_DELAY_MS);
checkEmpty(q);
}});
while (q.size() != 2)
Thread.yield();
assertEquals(2, q.size());
assertSame(four, q.poll());
assertSame(five, q.poll());
checkEmpty(q);
awaitTermination(t);
}
/**
* tryTransfer attempts to enqueue into the queue and fails
* returning false not enqueueing and the successive poll is null
*/
public void testTryTransfer8() throws InterruptedException {
final LinkedTransferQueue q = new LinkedTransferQueue();
assertTrue(q.offer(four));
assertEquals(1, q.size());
long startTime = System.nanoTime();
assertFalse(q.tryTransfer(five, timeoutMillis(), MILLISECONDS));
assertTrue(millisElapsedSince(startTime) >= timeoutMillis());
assertEquals(1, q.size());
assertSame(four, q.poll());
assertNull(q.poll());
checkEmpty(q);
}
private LinkedTransferQueue<Integer> populatedQueue(int n) {
LinkedTransferQueue<Integer> q = new LinkedTransferQueue<Integer>();
checkEmpty(q);
for (int i = 0; i < n; i++) {
assertEquals(i, q.size());
assertTrue(q.offer(i));
assertEquals(Integer.MAX_VALUE, q.remainingCapacity());
}
assertFalse(q.isEmpty());
return q;
}
/**
* remove(null), contains(null) always return false
*/
public void testNeverContainsNull() {
Collection<?>[] qs = {
new LinkedTransferQueue<Object>(),
populatedQueue(2),
};
for (Collection<?> q : qs) {
assertFalse(q.contains(null));
assertFalse(q.remove(null));
}
}
}
|
apache/mina-sshd | 35,845 | sshd-scp/src/main/java/org/apache/sshd/scp/server/ScpShell.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.scp.server;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.io.Reader;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.nio.file.FileSystem;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.function.Predicate;
import java.util.stream.Stream;

import org.apache.sshd.common.file.FileSystemFactory;
import org.apache.sshd.common.file.nativefs.NativeFileSystemFactory;
import org.apache.sshd.common.file.root.RootedFileSystem;
import org.apache.sshd.common.file.util.BaseFileSystem;
import org.apache.sshd.common.session.SessionContext;
import org.apache.sshd.common.util.GenericUtils;
import org.apache.sshd.common.util.io.IoUtils;
import org.apache.sshd.common.util.threads.CloseableExecutorService;
import org.apache.sshd.scp.ScpModuleProperties;
import org.apache.sshd.scp.common.ScpException;
import org.apache.sshd.scp.common.ScpFileOpener;
import org.apache.sshd.scp.common.ScpHelper;
import org.apache.sshd.scp.common.ScpTransferEventListener;
import org.apache.sshd.scp.common.helpers.DefaultScpFileOpener;
import org.apache.sshd.scp.common.helpers.ScpAckInfo;
import org.apache.sshd.server.Environment;
import org.apache.sshd.server.channel.ChannelSession;
import org.apache.sshd.server.channel.ServerChannelSessionHolder;
/**
* This command provides SCP support for a ChannelSession.
*
* @author <a href="mailto:dev@mina.apache.org">Apache MINA SSHD Project</a>
*/
public class ScpShell extends AbstractFileSystemCommand implements ServerChannelSessionHolder {

    /** Name of the shell variable that holds the exit status of the last executed command. */
    public static final String STATUS = "status";

    /** The "PWD" environment variable */
    public static final String ENV_PWD = "PWD";

    /** The "HOME" environment variable */
    public static final String ENV_HOME = "HOME";

    /**
     * Key for the language - format "en_US.UTF-8"
     */
    public static final String ENV_LANG = "LANG";

    // Bit flags for the "ls" command options (see ls() / doLs())
    private static final int LS_ALL = 1 << 0;       // -a: do not hide dot-files
    private static final int LS_DIR_PLAIN = 1 << 1; // -d: list the directory itself, not its contents
    private static final int LS_LONG = 1 << 2;      // -l: long listing format
    private static final int LS_FULL_TIME = 1 << 3; // --full-time: full timestamp in long listings

    // Bit flags for the "scp" command options (see scp() / doScp())
    private static final int SCP_D = 1 << 0; // -d option
    private static final int SCP_F = 1 << 1; // -f: "from" mode - send file(s) to the client
    private static final int SCP_P = 1 << 2; // -p: preserve times/permissions
    private static final int SCP_R = 1 << 3; // -r: recursive
    private static final int SCP_T = 1 << 4; // -t: "to" mode - receive file(s) from the client

    /** Shell-local variables; currently only {@link #STATUS} is stored here. */
    protected final Map<String, Object> variables = new HashMap<>();
    /** Charset used when writing file names (ls, pwd, echo) to the client. */
    protected final Charset nameEncodingCharset;
    /** Charset used when writing environment variable values; field name carries a historical typo. */
    protected final Charset envVarsEnodingCharset;
    /** File opener handed to the {@link ScpHelper}; never {@code null} (see constructor). */
    protected final ScpFileOpener opener;
    /** Transfer event listener handed to the {@link ScpHelper}; never {@code null} (see constructor). */
    protected final ScpTransferEventListener listener;
    /** Send buffer size; validated to be at least {@link ScpHelper#MIN_SEND_BUFFER_SIZE}. */
    protected final int sendBufferSize;
    /** Receive buffer size; validated to be at least {@link ScpHelper#MIN_RECEIVE_BUFFER_SIZE}. */
    protected final int receiveBufferSize;
    /** Current working directory of this shell session. */
    protected Path currentDir;
    /** The user's home directory, as mapped into the (possibly rooted) file system. */
    protected Path homeDir;

    private final ChannelSession channelSession;
public ScpShell(ChannelSession channelSession, CloseableExecutorService executorService,
int sendSize, int receiveSize,
ScpFileOpener fileOpener, ScpTransferEventListener eventListener) {
super(null, executorService);
this.channelSession = Objects.requireNonNull(channelSession, "No channel session provided");
nameEncodingCharset = ScpModuleProperties.SHELL_NAME_ENCODING_CHARSET.getRequired(channelSession);
envVarsEnodingCharset = ScpModuleProperties.SHELL_ENVVARS_ENCODING_CHARSET.getRequired(channelSession);
if (sendSize < ScpHelper.MIN_SEND_BUFFER_SIZE) {
throw new IllegalArgumentException("<ScpShell> send buffer size "
+ "(" + sendSize + ") below minimum required "
+ "(" + ScpHelper.MIN_SEND_BUFFER_SIZE + ")");
}
sendBufferSize = sendSize;
if (receiveSize < ScpHelper.MIN_RECEIVE_BUFFER_SIZE) {
throw new IllegalArgumentException("<ScpCommmand> receive buffer size "
+ "(" + sendSize + ") below minimum required "
+ "(" + ScpHelper.MIN_RECEIVE_BUFFER_SIZE + ")");
}
receiveBufferSize = receiveSize;
opener = (fileOpener == null) ? DefaultScpFileOpener.INSTANCE : fileOpener;
listener = (eventListener == null) ? ScpTransferEventListener.EMPTY : eventListener;
}
@Override
public ChannelSession getServerChannelSession() {
return channelSession;
}
    /**
     * Installs the shell's file system and derives {@link #homeDir} / {@link #currentDir} from it.
     * For a {@link RootedFileSystem} the real home directory is re-based onto the virtual root
     * so the client never sees the physical path; a plain {@link BaseFileSystem} supplies its
     * default directory; a native file system falls back to the process working directory.
     *
     * @throws IOException if the file system type is not supported or home resolution fails
     */
    @Override
    public void setFileSystemFactory(FileSystemFactory factory, SessionContext session) throws IOException {
        homeDir = factory.getUserHomeDir(session);
        super.setFileSystemFactory(factory, session);
        FileSystem fs = getFileSystem();
        if (fs instanceof RootedFileSystem) {
            Path fsLocalRoot = ((RootedFileSystem) fs).getRoot();
            Path newHome = fs.getPath("/");
            if (homeDir != null && homeDir.startsWith(fsLocalRoot)) {
                // Translate the physical home path into a path relative to the virtual root,
                // skipping empty name elements that relativize() may produce
                homeDir = fsLocalRoot.relativize(homeDir);
                int n = homeDir.getNameCount();
                for (int i = 0; i < n; i++) {
                    Path p = homeDir.getName(i);
                    if (!p.toString().isEmpty()) {
                        newHome = newHome.resolve(p);
                    }
                }
            }
            // If the physical home is outside the root, the virtual root itself becomes home
            homeDir = newHome;
            log.debug("Home dir in RootedFileSystem = {}", homeDir);
            currentDir = homeDir;
        } else if (fs instanceof BaseFileSystem<?>) {
            homeDir = ((BaseFileSystem<?>) fs).getDefaultDir();
            currentDir = homeDir;
        } else if (factory instanceof NativeFileSystemFactory) {
            // A native file system will allow the user to navigate anywhere. Not recommended.
            if (homeDir == null) {
                homeDir = new File(".").getCanonicalFile().toPath();
            }
            log.debug("Home dir in native FileSystem = {}", homeDir);
            currentDir = homeDir;
        } else {
            throw new IOException("ScpShell filesystem must be native or a RootedFileSystem or BaseFileSystem");
        }
    }
protected void println(String cmd, Object x, OutputStream out, Charset cs) {
try {
String s = x.toString();
if (log.isDebugEnabled()) {
log.debug("println({})[{}]: {}",
getServerChannelSession(), cmd, s.replace('\n', ' ').replace('\t', ' '));
}
out.write(s.getBytes(cs));
// always write LF even if running on Windows
out.write('\n');
} catch (IOException e) {
throw new IOError(e);
}
}
protected void signalError(String cmd, String errorMsg) {
signalError(cmd, errorMsg, envVarsEnodingCharset);
}
protected void signalError(String cmd, String errorMsg, Charset cs) {
log.warn("{}[{}]: {}", getServerChannelSession(), cmd, errorMsg);
println(cmd, errorMsg, getErrorStream(), cs);
variables.put(STATUS, 1);
}
    /**
     * Main shell loop: reads command lines from the client and dispatches them until an
     * empty line / EOF, a terminating command, or an interrupt ends the session.
     */
    @Override
    public void run() {
        String command = null;
        variables.put(STATUS, 0);
        boolean debugEnabled = log.isDebugEnabled();
        ChannelSession channel = getServerChannelSession();
        try {
            currentDir = homeDir;
            if (debugEnabled) {
                log.debug("run - starting at home dir={}", homeDir);
            }
            prepareEnvironment(getEnvironment());
            Charset decodingCharset = ScpModuleProperties.SHELL_NAME_DECODING_CHARSET.getRequired(channel);
            // Use a special stream reader so that the stream can be used with the scp command
            try (InputStream inputStream = getInputStream();
                 Reader r = new InputStreamReader(inputStream, decodingCharset)) {
                for (int executedCommands = 0;; executedCommands++) {
                    command = readLine(r);
                    // Empty line or EOF terminates the session normally
                    if (GenericUtils.isEmpty(command)) {
                        if (debugEnabled) {
                            log.debug("run({}) Command loop terminated after {} commands", channel, executedCommands);
                        }
                        return;
                    }
                    if (!handleCommandLine(command)) {
                        if (debugEnabled) {
                            log.debug("run({}) Command loop terminated by cmd={} after {} commands",
                                    channel, command, executedCommands);
                        }
                        return;
                    }
                }
            }
        } catch (InterruptedIOException e) {
            // I/O interruption is treated as a quiet termination
            if (debugEnabled) {
                log.debug("run({}) interrupted after command={}", channel, command);
            }
        } catch (Exception e) {
            String message = "Failed (" + e.getClass().getSimpleName() + ") to handle '" + command + "': " + e.getMessage();
            log.warn("run({}) {}", channel, message);
            try {
                OutputStream stderr = getErrorStream();
                // Don't encode it with any user defined charset
                stderr.write(message.getBytes(StandardCharsets.US_ASCII));
            } catch (IOException ioe) {
                log.warn("run({}) Failed ({}) to write error message={}: {}",
                        channel, ioe.getClass().getSimpleName(), message, ioe.getMessage());
            } finally {
                onExit(-1, message);
            }
        } finally {
            // NOTE(review): reached even after onExit(-1, ...) above - presumably onExit
            // ignores a second invocation; confirm against AbstractFileSystemCommand
            onExit(0);
        }
    }
protected String readLine(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
while (true) {
int c = reader.read();
if ((c < 0) || c == '\n') {
break;
}
sb.append((char) c);
}
int len = sb.length();
// Strip CR at end of line if present
if ((len > 0) && (sb.charAt(len - 1) == '\r')) {
sb.setLength(len - 1);
}
return sb.toString();
}
    /**
     * Parses a raw command line into one or more {@code ;}-separated commands and dispatches
     * each to its handler; unknown commands get status 127 via
     * {@link #handleUnsupportedCommand(String, String[])}. stdout/stderr are flushed after
     * every command so the client sees output promptly.
     *
     * @return {@code true} to keep the shell loop running
     */
    protected boolean handleCommandLine(String command) throws Exception {
        if (log.isDebugEnabled()) {
            log.debug("handleCommandLine({}) {}", getServerChannelSession(), command);
        }
        List<String[]> cmds = parse(command);
        OutputStream stdout = getOutputStream();
        OutputStream stderr = getErrorStream();
        for (String[] argv : cmds) {
            switch (argv[0]) {
                case "echo":
                    echo(argv);
                    break;
                case "pwd":
                    pwd(argv);
                    break;
                case "cd":
                    cd(argv);
                    break;
                case "ls":
                    ls(argv);
                    break;
                case "scp":
                    // scp needs the raw command text as well (forwarded to the helper)
                    scp(command, argv);
                    break;
                case "groups":
                    // Accepted but produces no output
                    variables.put(STATUS, 0);
                    break;
                case "printenv":
                    printenv(argv);
                    break;
                case "unset":
                    unset(argv);
                    break;
                case "unalias":
                    // Has no effect; we might also return status=0 (success)
                    variables.put(STATUS, 1);
                    break;
                default:
                    // TODO: rm -r -f path to support deletions
                    // TODO: mv -f oldname newname to support renaming
                    // TODO: mkdir name to create a new directory
                    // TODO: ln -s target link if the file system supports links
                    // TODO: chmod
                    // TODO: cp -p -r -f for remote-only copy
                    // see https://github.com/winscp/winscp/blob/88b50c1/source/core/ScpFileSystem.cpp#L108
                    // There'd be more, like sha512sum for supporting the checksum tab of file properties
                    handleUnsupportedCommand(command, argv);
            }
            stdout.flush();
            stderr.flush();
        }
        return true;
    }
protected void prepareEnvironment(Environment environ) {
Map<String, String> env = environ.getEnv();
Locale locale = Locale.getDefault();
String languageTag = locale.toLanguageTag();
env.put(ENV_LANG, languageTag.replace('-', '_') + "." + nameEncodingCharset.displayName());
env.put(ENV_HOME, homeDir.toString());
updatePwdEnvVariable(currentDir);
}
protected void handleUnsupportedCommand(String command, String[] argv) throws Exception {
log.warn("handleUnsupportedCommand({}) unsupported: {}", getServerChannelSession(), command);
variables.put(STATUS, 127);
OutputStream errorStream = getErrorStream();
// Don't encode it with any user defined charset
errorStream.write(("command not found: " + argv[0] + "\n").getBytes(StandardCharsets.US_ASCII));
}
protected List<String[]> parse(String command) {
List<String[]> cmds = new ArrayList<>();
List<String> args = new ArrayList<>();
StringBuilder arg = new StringBuilder();
char quote = 0;
boolean escaped = false;
for (int i = 0; i < command.length(); i++) {
char ch = command.charAt(i);
if (escaped) {
arg.append(ch);
escaped = false;
} else if (ch == quote) {
quote = 0;
} else if (ch == '"' || ch == '\'') {
quote = ch;
} else if (ch == '\\') {
escaped = true;
} else if (quote == 0 && Character.isWhitespace(ch)) {
if (arg.length() > 0) {
args.add(arg.toString());
arg.setLength(0);
}
} else if (quote == 0 && ch == ';') {
if (arg.length() > 0) {
args.add(arg.toString());
arg.setLength(0);
}
if (!args.isEmpty()) {
cmds.add(args.toArray(new String[0]));
}
args.clear();
} else {
arg.append(ch);
}
}
if (arg.length() > 0) {
args.add(arg.toString());
arg.setLength(0);
}
if (!args.isEmpty()) {
cmds.add(args.toArray(new String[0]));
}
return cmds;
}
protected void printenv(String[] argv) throws Exception {
Environment environ = getEnvironment();
Map<String, String> envValues = environ.getEnv();
OutputStream stdout = getOutputStream();
if (argv.length == 1) {
envValues.entrySet()
.stream()
.forEach(e -> println(argv[0], e.getKey() + "=" + e.getValue(), stdout, envVarsEnodingCharset));
variables.put(STATUS, 0);
return;
}
if (argv.length != 2) {
signalError(argv[0], "printenv: only one variable value at a time");
return;
}
String varName = argv[1];
String varValue = resolveEnvironmentVariable(varName, envValues);
if (varValue == null) {
signalError(argv[0], "printenv: variable not set " + varName);
return;
}
if (log.isDebugEnabled()) {
log.debug("printenv({}) {}={}", getServerChannelSession(), varName, varValue);
}
println(argv[0], varValue, stdout, envVarsEnodingCharset);
variables.put(STATUS, 0);
}
    /**
     * Looks up an environment variable's value; hook for subclasses to synthesize values.
     *
     * @return the value, or {@code null} if the variable is not set
     */
    protected String resolveEnvironmentVariable(String varName, Map<String, String> envValues) {
        return envValues.get(varName);
    }
protected void unset(String[] argv) throws Exception {
if (argv.length != 2) {
signalError(argv[0], "unset: exactly one argument is expected");
return;
}
Environment environ = getEnvironment();
Map<String, String> envValues = environ.getEnv();
String varName = argv[1];
String varValue = envValues.remove(varName);
if (log.isDebugEnabled()) {
log.debug("unset({}) {}={}", getServerChannelSession(), varName, varValue);
}
variables.put(STATUS, (varValue == null) ? 1 : 0);
}
    /**
     * Implements the {@code scp} command: validates the option set (-r, -t, -f, -d, -p)
     * and the single path argument, then delegates the transfer to
     * {@link #doScp(String, String, int)}. Exactly one of -t (receive) or -f (send)
     * must be present.
     *
     * @param command the raw command line (forwarded to the SCP helper)
     * @param argv    the parsed arguments; argv[0] is "scp"
     */
    protected void scp(String command, String[] argv) throws Exception {
        int options = 0;
        boolean isOption = true;
        String path = null;
        for (int i = 1; i < argv.length; i++) {
            String argVal = argv[i];
            if (GenericUtils.isEmpty(argVal)) {
                signalError(argv[0], "scp: empty argument not allowed");
                return;
            }
            if (isOption && (argVal.charAt(0) == '-')) {
                // Options must be passed one per argument ("-r -p", not "-rp")
                if (argVal.length() != 2) {
                    signalError(argv[0], "scp: only one option at a time may be specified");
                    return;
                }
                // TODO should we raise an error if option re-specified ?
                char optVal = argVal.charAt(1);
                switch (optVal) {
                    case 'r':
                        options |= SCP_R;
                        break;
                    case 't':
                        options |= SCP_T;
                        break;
                    case 'f':
                        options |= SCP_F;
                        break;
                    case 'd':
                        options |= SCP_D;
                        break;
                    case 'p':
                        options |= SCP_P;
                        break;
                    default:
                        signalError(argv[0], "scp: unsupported option: " + argVal);
                        return;
                }
            } else if (path == null) {
                // WinSCP sends local paths, but let's be sure here.
                path = toScpPath(argVal);
                // Anything after the path is no longer treated as an option
                isOption = false;
            } else {
                signalError(argv[0], "scp: one and only one path argument expected");
                return;
            }
        }
        // Exactly one transfer direction must have been selected
        int tf = options & (SCP_T | SCP_F);
        if (tf != SCP_T && tf != SCP_F) {
            signalError(argv[0], "scp: one and only one of -t and -f option expected");
            return;
        }
        doScp(command, path, options);
    }
    /**
     * Executes an SCP transfer via {@link ScpHelper}: with -t receives into {@code path},
     * with -f sends from it; the path is resolved against the current directory. On failure
     * an SCP error acknowledgment is sent to the peer and the exit value is stored in
     * {@link #STATUS}.
     */
    protected void doScp(String command, String path, int options) throws Exception {
        try {
            ChannelSession channel = getServerChannelSession();
            ScpHelper helper = new ScpHelper(
                    channel.getSession(), getInputStream(), getOutputStream(),
                    fileSystem, opener, listener);
            Path localPath = currentDir.resolve(path);
            if ((options & SCP_T) != 0) {
                if (log.isDebugEnabled()) {
                    log.debug("doScp({}) receiving file in {} at {}", getServerChannelSession(), path, localPath);
                }
                helper.receive(command, localPath, (options & SCP_R) != 0, (options & SCP_D) != 0, (options & SCP_P) != 0,
                        receiveBufferSize);
            } else {
                if (log.isDebugEnabled()) {
                    log.debug("doScp({}) sending file {} from {}", getServerChannelSession(), path, localPath);
                }
                helper.send(Collections.singletonList(localPath.toString()), (options & SCP_R) != 0, (options & SCP_P) != 0,
                        sendBufferSize);
            }
            variables.put(STATUS, 0);
        } catch (IOException e) {
            // Prefer the exit status carried by an ScpException, if any
            Integer statusCode = e instanceof ScpException ? ((ScpException) e).getExitStatus() : null;
            int exitValue = (statusCode == null) ? ScpAckInfo.ERROR : statusCode;
            // this is an exception so status cannot be OK/WARNING
            if ((exitValue == ScpAckInfo.OK) || (exitValue == ScpAckInfo.WARNING)) {
                exitValue = ScpAckInfo.ERROR;
            }
            String exitMessage = GenericUtils.trimToEmpty(e.getMessage());
            ScpAckInfo.sendAck(getOutputStream(), StandardCharsets.UTF_8, exitValue, exitMessage);
            variables.put(STATUS, exitValue);
        }
    }
protected void echo(String[] argv) throws Exception {
StringBuilder buf = new StringBuilder();
for (int k = 1; k < argv.length; k++) {
String arg = argv[k];
if (buf.length() > 0) {
buf.append(' ');
}
int vstart = -1;
for (int i = 0; i < arg.length(); i++) {
int c = arg.charAt(i);
if (vstart >= 0) {
if (c != '_' && (c < '0' || c > '9') && (c < 'A' || c > 'Z') && (c < 'a' || c > 'z')) {
if (vstart == i) {
buf.append('$');
} else {
String n = arg.substring(vstart, i);
Object v = variables.get(n);
if (v != null) {
buf.append(v);
}
}
vstart = -1;
}
} else if (c == '$') {
vstart = i + 1;
} else {
buf.append((char) c);
}
}
if (vstart >= 0) {
String n = arg.substring(vstart);
if (n.isEmpty()) {
buf.append('$');
} else {
Object v = variables.get(n);
if (v != null) {
buf.append(v);
}
}
}
}
println(argv[0], buf, getOutputStream(), nameEncodingCharset);
variables.put(STATUS, 0);
}
protected void pwd(String[] argv) throws Exception {
if (argv.length != 1) {
signalError(argv[0], "pwd: too many arguments");
} else {
println(argv[0], currentDir, getOutputStream(), nameEncodingCharset);
variables.put(STATUS, 0);
}
}
private String toScpPath(String winScpPath) {
// WinSCP may send windows paths like C:\foo\bar. Map this to a virtual path if needed.
String separator = fileSystem.getSeparator();
String scpPath = winScpPath.replace("\\", separator);
if (scpPath.equals(winScpPath)) {
// Assume it's OK
return scpPath;
}
int i = scpPath.indexOf(separator);
// TODO: UNC paths? Funny \? prefixes? Looks like WinSCP doesn't send those.
if (i == 2 && scpPath.charAt(1) == ':') {
// Strip drive letter
scpPath = scpPath.substring(2);
}
return scpPath;
}
    /**
     * Implements {@code cd}: with no argument returns to the home directory; with one
     * argument changes to that directory (resolved against the current one) after
     * verifying it exists and is a directory. Updates the PWD environment variable
     * on success.
     */
    protected void cd(String[] argv) throws Exception {
        if (argv.length == 1) {
            // Bare "cd" goes home
            if (homeDir != null) {
                currentDir = homeDir;
                updatePwdEnvVariable(currentDir);
                variables.put(STATUS, 0);
            } else {
                signalError(argv[0], "No home directory to return to");
            }
            return;
        }
        if (argv.length != 2) {
            signalError(argv[0], "cd: too many or too few arguments");
            return;
        }
        String path = argv[1];
        if (GenericUtils.isEmpty(path)) {
            signalError(argv[0], "cd: empty target");
            return;
        }
        // TODO make sure not escaping the user's sandbox filesystem
        // NOTE(review): with a non-rooted file system ".." segments can leave the home
        // tree - confirm the deployment always uses a RootedFileSystem when sandboxing matters
        Path cwd = currentDir;
        cwd = cwd.resolve(toScpPath(path)).toAbsolutePath().normalize();
        if (!Files.exists(cwd)) {
            signalError(argv[0], "no such file or directory: " + path, nameEncodingCharset);
        } else if (!Files.isDirectory(cwd)) {
            signalError(argv[0], "not a directory: " + path, nameEncodingCharset);
        } else {
            if (log.isDebugEnabled()) {
                log.debug("cd - {} => {}", currentDir, cwd);
            }
            currentDir = cwd;
            updatePwdEnvVariable(currentDir);
            variables.put(STATUS, 0);
        }
    }
protected void updatePwdEnvVariable(Path pwd) {
Environment environ = getEnvironment();
Map<String, String> envVars = environ.getEnv();
envVars.put(ENV_PWD, pwd.toString());
}
    /**
     * Implements {@code ls}: parses the supported options (-a, -d, -l, combinable as in
     * "-al", plus --full-time) and an optional single path, then delegates to
     * {@link #doLs(String, String, int)}.
     */
    protected void ls(String[] argv) throws Exception {
        int options = 0;
        String path = null;
        for (int k = 1; k < argv.length; k++) {
            String argValue = argv[k];
            if (GenericUtils.isEmpty(argValue)) {
                signalError(argv[0], "ls: empty argument not allowed");
                return;
            }
            if (argValue.equals("--full-time")) {
                options |= LS_FULL_TIME;
            } else if (argValue.charAt(0) == '-') {
                int argLen = argValue.length();
                if (argLen == 1) {
                    signalError(argv[0], "ls: no option specified");
                    return;
                }
                // Short options may be combined, e.g. "-al"
                for (int i = 1; i < argLen; i++) {
                    char optValue = argValue.charAt(i);
                    // TODO should we raise an error if option re-specified ?
                    switch (optValue) {
                        case 'a':
                            options |= LS_ALL;
                            break;
                        case 'd':
                            options |= LS_DIR_PLAIN;
                            break;
                        case 'l':
                            options |= LS_LONG;
                            break;
                        default:
                            signalError(argv[0], "unsupported option: -" + optValue);
                            return;
                    }
                }
            } else if (path == null) {
                path = toScpPath(argValue);
            } else {
                // At most one path argument is supported
                signalError(argv[0], "unsupported option: " + argValue);
                return;
            }
        }
        doLs(argv[0], path, options);
    }
    /**
     * Produces the {@code ls} listing: lists a directory's contents (plus "." and "..",
     * filtered for hidden entries unless -a), or a single entry when a plain file or -d
     * was given. Entries are printed sorted by name; a vanished file sets status 1.
     *
     * @param cmd     the command name (for log/error prefixes)
     * @param path    the path argument, or {@code null} for the current directory
     * @param options bit mask of the LS_* option flags
     */
    protected void doLs(String cmd, String path, int options) throws Exception {
        boolean listDirectory = path == null;
        Path toList = currentDir;
        if (path != null) {
            toList = currentDir.resolve(path);
            // -d lists the directory entry itself rather than its contents
            listDirectory = ((options & LS_DIR_PLAIN) == 0) && Files.isDirectory(toList);
        }
        // Directory entries display relative to the listed dir; a single entry relative to cwd
        Path inDir = listDirectory ? toList : currentDir;
        // Hide the .. entry if we're listing the root
        Stream<String> dotDirs = Stream.empty();
        if (listDirectory) {
            dotDirs = toList.getNameCount() == 0 ? Stream.of(".") : Stream.of(".", "..");
        }
        Predicate<Path> filter;
        if (!listDirectory || (options & LS_ALL) != 0) {
            filter = p -> true;
        } else {
            // Without -a: hide dot-files, but keep the synthetic "." and ".." entries
            filter = p -> {
                String fileName = p.getFileName().toString();
                return fileName.equals(".") || fileName.equals("..") || !fileName.startsWith(".");
            };
        }
        try (Stream<Path> files = !listDirectory
                ? Stream.of(toList)
                : Stream.concat(dotDirs.map(toList::resolve), Files.list(toList))) {
            OutputStream stdout = getOutputStream();
            OutputStream stderr = getErrorStream();
            variables.put(STATUS, 0);
            files
                    .filter(filter)
                    .map(p -> new PathEntry(p, inDir))
                    .sorted()
                    .forEach(p -> {
                        try {
                            String str = p.display((options & LS_LONG) != 0, (options & LS_FULL_TIME) != 0);
                            println(cmd, str, stdout, nameEncodingCharset);
                        } catch (NoSuchFileException e) {
                            // File disappeared between listing and stat'ing
                            println(cmd, cmd + ": " + p.path.toString() + ": no such file or directory", stderr,
                                    nameEncodingCharset);
                            variables.put(STATUS, 1);
                        }
                    });
        }
    }
protected static class PathEntry implements Comparable<PathEntry> {
// WinSCP needs the month names always in English.
public static final DateTimeFormatter FULL_TIME_VALUE_FORMATTER = DateTimeFormatter.ofPattern("MMM ppd HH:mm:ss yyyy",
Locale.ENGLISH);
public static final DateTimeFormatter TIME_ONLY_VALUE_FORMATTER = DateTimeFormatter.ofPattern("MMM ppd HH:mm",
Locale.ENGLISH);
public static final DateTimeFormatter YEAR_VALUE_FORMATTER = DateTimeFormatter.ofPattern("MMM ppd yyyy",
Locale.ENGLISH);
protected final Path abs;
protected final Path path;
protected final Map<String, Object> attributes;
public PathEntry(Path abs, Path root) {
this.abs = abs;
this.path = abs.startsWith(root) ? root.relativize(abs) : abs;
this.attributes = readAttributes(abs);
}
@Override
public int compareTo(PathEntry o) {
return path.toString().compareTo(o.path.toString());
}
@Override
public String toString() {
return Objects.toString(abs);
}
public String display(boolean optLongDisplay, boolean optFullTime) throws NoSuchFileException {
if (attributes.isEmpty()) {
throw new NoSuchFileException(path.toString());
}
String abbrev = shortDisplay();
if (!optLongDisplay) {
return abbrev;
}
StringBuilder sb = new StringBuilder(abbrev.length() + 64);
if (is(IoUtils.DIRECTORY_VIEW_ATTR)) {
sb.append('d');
} else if (is(IoUtils.SYMLINK_VIEW_ATTR)) {
sb.append('l');
} else if (is(IoUtils.OTHERFILE_VIEW_ATTR)) {
sb.append('o');
} else {
sb.append('-');
}
@SuppressWarnings("unchecked")
Set<PosixFilePermission> perms = (Set<PosixFilePermission>) attributes.get(IoUtils.PERMISSIONS_VIEW_ATTR);
if (perms == null) {
perms = EnumSet.noneOf(PosixFilePermission.class);
}
sb.append(PosixFilePermissions.toString(perms));
Object nlinkValue = attributes.get(IoUtils.NUMLINKS_VIEW_ATTR);
sb.append(' ').append(String.format("%3s", (nlinkValue != null) ? nlinkValue : "1"));
appendOwnerInformation(sb, IoUtils.OWNER_VIEW_ATTR, "owner");
appendOwnerInformation(sb, IoUtils.GROUP_VIEW_ATTR, "group");
Number length = (Number) attributes.get(IoUtils.SIZE_VIEW_ATTR);
if (length == null) {
length = 0L;
}
sb.append(' ').append(String.format("%1$8s", length));
String timeValue = toString((FileTime) attributes.get(IoUtils.LASTMOD_TIME_VIEW_ATTR), optFullTime);
sb.append(' ').append(timeValue);
sb.append(' ').append(abbrev);
return sb.toString();
}
protected boolean is(String attr) {
Object d = attributes.get(attr);
return (d instanceof Boolean) && (Boolean) d;
}
protected StringBuilder appendOwnerInformation(
StringBuilder sb, String attr, String defaultValue) {
String owner = Objects.toString(attributes.get(attr), null);
if (GenericUtils.isEmpty(owner)) {
owner = defaultValue;
}
if (owner.length() > 8) {
owner = owner.substring(0, 8);
}
sb.append(' ').append(owner);
for (int index = owner.length(); index < 8; index++) {
sb.append(' ');
}
return sb;
}
protected String shortDisplay() {
if (is("isSymbolicLink")) {
try {
Path l = Files.readSymbolicLink(abs);
return path + " -> " + l;
} catch (IOException e) {
// ignore
}
}
String str = path.toString();
if (str.isEmpty()) {
return abs.getFileName().toString();
}
return str;
}
protected static String toString(FileTime time, boolean optFullTime) {
long millis = (time != null) ? time.toMillis() : -1L;
if (millis < 0L) {
return "------------";
}
ZonedDateTime dt = Instant.ofEpochMilli(millis).atZone(ZoneId.systemDefault());
if (optFullTime) {
return FULL_TIME_VALUE_FORMATTER.format(dt);
} else if (System.currentTimeMillis() - millis < 183L * 24L * 60L * 60L * 1000L) {
return TIME_ONLY_VALUE_FORMATTER.format(dt);
} else {
return YEAR_VALUE_FORMATTER.format(dt);
}
}
protected static Map<String, Object> readAttributes(Path path) {
Map<String, Object> attrs = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
FileSystem fs = path.getFileSystem();
Collection<String> views = fs.supportedFileAttributeViews();
for (String view : views) {
try {
Map<String, Object> ta = Files.readAttributes(
path, view + ":*", IoUtils.getLinkOptions(false));
ta.forEach(attrs::putIfAbsent);
} catch (IOException e) {
// Ignore
}
}
if (!attrs.isEmpty()) {
attrs.computeIfAbsent(IoUtils.EXECUTABLE_VIEW_ATTR, s -> Files.isExecutable(path));
attrs.computeIfAbsent(IoUtils.PERMISSIONS_VIEW_ATTR, s -> IoUtils.getPermissionsFromFile(path.toFile()));
}
return attrs;
}
}
}
|
google/schemaorg-java | 35,814 | src/main/java/com/google/schemaorg/core/VideoGameSeries.java | /*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.schemaorg.core;
import com.google.common.collect.ImmutableList;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.DateTime;
import com.google.schemaorg.core.datatype.Integer;
import com.google.schemaorg.core.datatype.Number;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;
/**
 * Interface of <a href="http://schema.org/VideoGameSeries">http://schema.org/VideoGameSeries</a>.
 */
public interface VideoGameSeries extends CreativeWorkSeries {
/**
 * Builder interface of <a
 * href="http://schema.org/VideoGameSeries">http://schema.org/VideoGameSeries</a>.
 */
public interface Builder extends CreativeWorkSeries.Builder {
@Override
Builder addJsonLdContext(@Nullable JsonLdContext context);
@Override
Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);
@Override
Builder setJsonLdId(@Nullable String value);
@Override
Builder setJsonLdReverse(String property, Thing obj);
@Override
Builder setJsonLdReverse(String property, Thing.Builder builder);
/** Add a value to property about. */
Builder addAbout(Thing value);
/** Add a value to property about. */
Builder addAbout(Thing.Builder value);
/** Add a value to property about. */
Builder addAbout(String value);
/** Add a value to property accessibilityAPI. */
Builder addAccessibilityAPI(Text value);
/** Add a value to property accessibilityAPI. */
Builder addAccessibilityAPI(String value);
/** Add a value to property accessibilityControl. */
Builder addAccessibilityControl(Text value);
/** Add a value to property accessibilityControl. */
Builder addAccessibilityControl(String value);
/** Add a value to property accessibilityFeature. */
Builder addAccessibilityFeature(Text value);
/** Add a value to property accessibilityFeature. */
Builder addAccessibilityFeature(String value);
/** Add a value to property accessibilityHazard. */
Builder addAccessibilityHazard(Text value);
/** Add a value to property accessibilityHazard. */
Builder addAccessibilityHazard(String value);
/** Add a value to property accountablePerson. */
Builder addAccountablePerson(Person value);
/** Add a value to property accountablePerson. */
Builder addAccountablePerson(Person.Builder value);
/** Add a value to property accountablePerson. */
Builder addAccountablePerson(String value);
/** Add a value to property actor. */
Builder addActor(Person value);
/** Add a value to property actor. */
Builder addActor(Person.Builder value);
/** Add a value to property actor. */
Builder addActor(String value);
/** Add a value to property actors. */
Builder addActors(Person value);
/** Add a value to property actors. */
Builder addActors(Person.Builder value);
/** Add a value to property actors. */
Builder addActors(String value);
/** Add a value to property additionalType. */
Builder addAdditionalType(URL value);
/** Add a value to property additionalType. */
Builder addAdditionalType(String value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(AggregateRating.Builder value);
/** Add a value to property aggregateRating. */
Builder addAggregateRating(String value);
/** Add a value to property alternateName. */
Builder addAlternateName(Text value);
/** Add a value to property alternateName. */
Builder addAlternateName(String value);
/** Add a value to property alternativeHeadline. */
Builder addAlternativeHeadline(Text value);
/** Add a value to property alternativeHeadline. */
Builder addAlternativeHeadline(String value);
/** Add a value to property associatedMedia. */
Builder addAssociatedMedia(MediaObject value);
/** Add a value to property associatedMedia. */
Builder addAssociatedMedia(MediaObject.Builder value);
/** Add a value to property associatedMedia. */
Builder addAssociatedMedia(String value);
/** Add a value to property audience. */
Builder addAudience(Audience value);
/** Add a value to property audience. */
Builder addAudience(Audience.Builder value);
/** Add a value to property audience. */
Builder addAudience(String value);
/** Add a value to property audio. */
Builder addAudio(AudioObject value);
/** Add a value to property audio. */
Builder addAudio(AudioObject.Builder value);
/** Add a value to property audio. */
Builder addAudio(String value);
/** Add a value to property author. */
Builder addAuthor(Organization value);
/** Add a value to property author. */
Builder addAuthor(Organization.Builder value);
/** Add a value to property author. */
Builder addAuthor(Person value);
/** Add a value to property author. */
Builder addAuthor(Person.Builder value);
/** Add a value to property author. */
Builder addAuthor(String value);
/** Add a value to property award. */
Builder addAward(Text value);
/** Add a value to property award. */
Builder addAward(String value);
/** Add a value to property awards. */
Builder addAwards(Text value);
/** Add a value to property awards. */
Builder addAwards(String value);
/** Add a value to property character. */
Builder addCharacter(Person value);
/** Add a value to property character. */
Builder addCharacter(Person.Builder value);
/** Add a value to property character. */
Builder addCharacter(String value);
/** Add a value to property characterAttribute. */
Builder addCharacterAttribute(Thing value);
/** Add a value to property characterAttribute. */
Builder addCharacterAttribute(Thing.Builder value);
/** Add a value to property characterAttribute. */
Builder addCharacterAttribute(String value);
/** Add a value to property cheatCode. */
Builder addCheatCode(CreativeWork value);
/** Add a value to property cheatCode. */
Builder addCheatCode(CreativeWork.Builder value);
/** Add a value to property cheatCode. */
Builder addCheatCode(String value);
/** Add a value to property citation. */
Builder addCitation(CreativeWork value);
/** Add a value to property citation. */
Builder addCitation(CreativeWork.Builder value);
/** Add a value to property citation. */
Builder addCitation(Text value);
/** Add a value to property citation. */
Builder addCitation(String value);
/** Add a value to property comment. */
Builder addComment(Comment value);
/** Add a value to property comment. */
Builder addComment(Comment.Builder value);
/** Add a value to property comment. */
Builder addComment(String value);
/** Add a value to property commentCount. */
Builder addCommentCount(Integer value);
/** Add a value to property commentCount. */
Builder addCommentCount(String value);
/** Add a value to property containsSeason. */
Builder addContainsSeason(CreativeWorkSeason value);
/** Add a value to property containsSeason. */
Builder addContainsSeason(CreativeWorkSeason.Builder value);
/** Add a value to property containsSeason. */
Builder addContainsSeason(String value);
/** Add a value to property contentLocation. */
Builder addContentLocation(Place value);
/** Add a value to property contentLocation. */
Builder addContentLocation(Place.Builder value);
/** Add a value to property contentLocation. */
Builder addContentLocation(String value);
/** Add a value to property contentRating. */
Builder addContentRating(Text value);
/** Add a value to property contentRating. */
Builder addContentRating(String value);
/** Add a value to property contributor. */
Builder addContributor(Organization value);
/** Add a value to property contributor. */
Builder addContributor(Organization.Builder value);
/** Add a value to property contributor. */
Builder addContributor(Person value);
/** Add a value to property contributor. */
Builder addContributor(Person.Builder value);
/** Add a value to property contributor. */
Builder addContributor(String value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Organization value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Organization.Builder value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Person value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(Person.Builder value);
/** Add a value to property copyrightHolder. */
Builder addCopyrightHolder(String value);
/** Add a value to property copyrightYear. */
Builder addCopyrightYear(Number value);
/** Add a value to property copyrightYear. */
Builder addCopyrightYear(String value);
/** Add a value to property creator. */
Builder addCreator(Organization value);
/** Add a value to property creator. */
Builder addCreator(Organization.Builder value);
/** Add a value to property creator. */
Builder addCreator(Person value);
/** Add a value to property creator. */
Builder addCreator(Person.Builder value);
/** Add a value to property creator. */
Builder addCreator(String value);
/** Add a value to property dateCreated. */
Builder addDateCreated(Date value);
/** Add a value to property dateCreated. */
Builder addDateCreated(DateTime value);
/** Add a value to property dateCreated. */
Builder addDateCreated(String value);
/** Add a value to property dateModified. */
Builder addDateModified(Date value);
/** Add a value to property dateModified. */
Builder addDateModified(DateTime value);
/** Add a value to property dateModified. */
Builder addDateModified(String value);
/** Add a value to property datePublished. */
Builder addDatePublished(Date value);
/** Add a value to property datePublished. */
Builder addDatePublished(String value);
/** Add a value to property description. */
Builder addDescription(Text value);
/** Add a value to property description. */
Builder addDescription(String value);
/** Add a value to property director. */
Builder addDirector(Person value);
/** Add a value to property director. */
Builder addDirector(Person.Builder value);
/** Add a value to property director. */
Builder addDirector(String value);
/** Add a value to property directors. */
Builder addDirectors(Person value);
/** Add a value to property directors. */
Builder addDirectors(Person.Builder value);
/** Add a value to property directors. */
Builder addDirectors(String value);
/** Add a value to property discussionUrl. */
Builder addDiscussionUrl(URL value);
/** Add a value to property discussionUrl. */
Builder addDiscussionUrl(String value);
/** Add a value to property editor. */
Builder addEditor(Person value);
/** Add a value to property editor. */
Builder addEditor(Person.Builder value);
/** Add a value to property editor. */
Builder addEditor(String value);
/** Add a value to property educationalAlignment. */
Builder addEducationalAlignment(AlignmentObject value);
/** Add a value to property educationalAlignment. */
Builder addEducationalAlignment(AlignmentObject.Builder value);
/** Add a value to property educationalAlignment. */
Builder addEducationalAlignment(String value);
/** Add a value to property educationalUse. */
Builder addEducationalUse(Text value);
/** Add a value to property educationalUse. */
Builder addEducationalUse(String value);
/** Add a value to property encoding. */
Builder addEncoding(MediaObject value);
/** Add a value to property encoding. */
Builder addEncoding(MediaObject.Builder value);
/** Add a value to property encoding. */
Builder addEncoding(String value);
/** Add a value to property encodings. */
Builder addEncodings(MediaObject value);
/** Add a value to property encodings. */
Builder addEncodings(MediaObject.Builder value);
/** Add a value to property encodings. */
Builder addEncodings(String value);
/** Add a value to property endDate. */
Builder addEndDate(Date value);
/** Add a value to property endDate. */
Builder addEndDate(String value);
/** Add a value to property episode. */
Builder addEpisode(Episode value);
/** Add a value to property episode. */
Builder addEpisode(Episode.Builder value);
/** Add a value to property episode. */
Builder addEpisode(String value);
/** Add a value to property episodes. */
Builder addEpisodes(Episode value);
/** Add a value to property episodes. */
Builder addEpisodes(Episode.Builder value);
/** Add a value to property episodes. */
Builder addEpisodes(String value);
/** Add a value to property exampleOfWork. */
Builder addExampleOfWork(CreativeWork value);
/** Add a value to property exampleOfWork. */
Builder addExampleOfWork(CreativeWork.Builder value);
/** Add a value to property exampleOfWork. */
Builder addExampleOfWork(String value);
/** Add a value to property fileFormat. */
Builder addFileFormat(Text value);
/** Add a value to property fileFormat. */
Builder addFileFormat(String value);
/** Add a value to property gameItem. */
Builder addGameItem(Thing value);
/** Add a value to property gameItem. */
Builder addGameItem(Thing.Builder value);
/** Add a value to property gameItem. */
Builder addGameItem(String value);
/** Add a value to property gameLocation. */
Builder addGameLocation(Place value);
/** Add a value to property gameLocation. */
Builder addGameLocation(Place.Builder value);
/** Add a value to property gameLocation. */
Builder addGameLocation(PostalAddress value);
/** Add a value to property gameLocation. */
Builder addGameLocation(PostalAddress.Builder value);
/** Add a value to property gameLocation. */
Builder addGameLocation(URL value);
/** Add a value to property gameLocation. */
Builder addGameLocation(String value);
/** Add a value to property gamePlatform. */
Builder addGamePlatform(Text value);
/** Add a value to property gamePlatform. */
Builder addGamePlatform(Thing value);
/** Add a value to property gamePlatform. */
Builder addGamePlatform(Thing.Builder value);
/** Add a value to property gamePlatform. */
Builder addGamePlatform(URL value);
/** Add a value to property gamePlatform. */
Builder addGamePlatform(String value);
/** Add a value to property genre. */
Builder addGenre(Text value);
/** Add a value to property genre. */
Builder addGenre(URL value);
/** Add a value to property genre. */
Builder addGenre(String value);
/** Add a value to property hasPart. */
Builder addHasPart(CreativeWork value);
/** Add a value to property hasPart. */
Builder addHasPart(CreativeWork.Builder value);
/** Add a value to property hasPart. */
Builder addHasPart(String value);
/** Add a value to property headline. */
Builder addHeadline(Text value);
/** Add a value to property headline. */
Builder addHeadline(String value);
/** Add a value to property image. */
Builder addImage(ImageObject value);
/** Add a value to property image. */
Builder addImage(ImageObject.Builder value);
/** Add a value to property image. */
Builder addImage(URL value);
/** Add a value to property image. */
Builder addImage(String value);
/** Add a value to property inLanguage. */
Builder addInLanguage(Language value);
/** Add a value to property inLanguage. */
Builder addInLanguage(Language.Builder value);
/** Add a value to property inLanguage. */
Builder addInLanguage(Text value);
/** Add a value to property inLanguage. */
Builder addInLanguage(String value);
/** Add a value to property interactionStatistic. */
Builder addInteractionStatistic(InteractionCounter value);
/** Add a value to property interactionStatistic. */
Builder addInteractionStatistic(InteractionCounter.Builder value);
/** Add a value to property interactionStatistic. */
Builder addInteractionStatistic(String value);
/** Add a value to property interactivityType. */
Builder addInteractivityType(Text value);
/** Add a value to property interactivityType. */
Builder addInteractivityType(String value);
/** Add a value to property isBasedOnUrl. */
Builder addIsBasedOnUrl(URL value);
/** Add a value to property isBasedOnUrl. */
Builder addIsBasedOnUrl(String value);
/** Add a value to property isFamilyFriendly. */
Builder addIsFamilyFriendly(Boolean value);
/** Add a value to property isFamilyFriendly. */
Builder addIsFamilyFriendly(String value);
/** Add a value to property isPartOf. */
Builder addIsPartOf(CreativeWork value);
/** Add a value to property isPartOf. */
Builder addIsPartOf(CreativeWork.Builder value);
/** Add a value to property isPartOf. */
Builder addIsPartOf(String value);
/** Add a value to property keywords. */
Builder addKeywords(Text value);
/** Add a value to property keywords. */
Builder addKeywords(String value);
/** Add a value to property learningResourceType. */
Builder addLearningResourceType(Text value);
/** Add a value to property learningResourceType. */
Builder addLearningResourceType(String value);
/** Add a value to property license. */
Builder addLicense(CreativeWork value);
/** Add a value to property license. */
Builder addLicense(CreativeWork.Builder value);
/** Add a value to property license. */
Builder addLicense(URL value);
/** Add a value to property license. */
Builder addLicense(String value);
/** Add a value to property locationCreated. */
Builder addLocationCreated(Place value);
/** Add a value to property locationCreated. */
Builder addLocationCreated(Place.Builder value);
/** Add a value to property locationCreated. */
Builder addLocationCreated(String value);
/** Add a value to property mainEntity. */
Builder addMainEntity(Thing value);
/** Add a value to property mainEntity. */
Builder addMainEntity(Thing.Builder value);
/** Add a value to property mainEntity. */
Builder addMainEntity(String value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(CreativeWork.Builder value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(URL value);
/** Add a value to property mainEntityOfPage. */
Builder addMainEntityOfPage(String value);
/** Add a value to property mentions. */
Builder addMentions(Thing value);
/** Add a value to property mentions. */
Builder addMentions(Thing.Builder value);
/** Add a value to property mentions. */
Builder addMentions(String value);
/** Add a value to property musicBy. */
Builder addMusicBy(MusicGroup value);
/** Add a value to property musicBy. */
Builder addMusicBy(MusicGroup.Builder value);
/** Add a value to property musicBy. */
Builder addMusicBy(Person value);
/** Add a value to property musicBy. */
Builder addMusicBy(Person.Builder value);
/** Add a value to property musicBy. */
Builder addMusicBy(String value);
/** Add a value to property name. */
Builder addName(Text value);
/** Add a value to property name. */
Builder addName(String value);
/** Add a value to property numberOfEpisodes. */
Builder addNumberOfEpisodes(Integer value);
/** Add a value to property numberOfEpisodes. */
Builder addNumberOfEpisodes(String value);
/** Add a value to property numberOfPlayers. */
Builder addNumberOfPlayers(QuantitativeValue value);
/** Add a value to property numberOfPlayers. */
Builder addNumberOfPlayers(QuantitativeValue.Builder value);
/** Add a value to property numberOfPlayers. */
Builder addNumberOfPlayers(String value);
/** Add a value to property numberOfSeasons. */
Builder addNumberOfSeasons(Integer value);
/** Add a value to property numberOfSeasons. */
Builder addNumberOfSeasons(String value);
/** Add a value to property offers. */
Builder addOffers(Offer value);
/** Add a value to property offers. */
Builder addOffers(Offer.Builder value);
/** Add a value to property offers. */
Builder addOffers(String value);
/** Add a value to property playMode. */
Builder addPlayMode(GamePlayMode value);
/** Add a value to property playMode. */
Builder addPlayMode(String value);
/** Add a value to property position. */
Builder addPosition(Integer value);
/** Add a value to property position. */
Builder addPosition(Text value);
/** Add a value to property position. */
Builder addPosition(String value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(Action.Builder value);
/** Add a value to property potentialAction. */
Builder addPotentialAction(String value);
/** Add a value to property producer. */
Builder addProducer(Organization value);
/** Add a value to property producer. */
Builder addProducer(Organization.Builder value);
/** Add a value to property producer. */
Builder addProducer(Person value);
/** Add a value to property producer. */
Builder addProducer(Person.Builder value);
/** Add a value to property producer. */
Builder addProducer(String value);
/** Add a value to property productionCompany. */
Builder addProductionCompany(Organization value);
/** Add a value to property productionCompany. */
Builder addProductionCompany(Organization.Builder value);
/** Add a value to property productionCompany. */
Builder addProductionCompany(String value);
/** Add a value to property provider. */
Builder addProvider(Organization value);
/** Add a value to property provider. */
Builder addProvider(Organization.Builder value);
/** Add a value to property provider. */
Builder addProvider(Person value);
/** Add a value to property provider. */
Builder addProvider(Person.Builder value);
/** Add a value to property provider. */
Builder addProvider(String value);
/** Add a value to property publication. */
Builder addPublication(PublicationEvent value);
/** Add a value to property publication. */
Builder addPublication(PublicationEvent.Builder value);
/** Add a value to property publication. */
Builder addPublication(String value);
/** Add a value to property publisher. */
Builder addPublisher(Organization value);
/** Add a value to property publisher. */
Builder addPublisher(Organization.Builder value);
/** Add a value to property publisher. */
Builder addPublisher(Person value);
/** Add a value to property publisher. */
Builder addPublisher(Person.Builder value);
/** Add a value to property publisher. */
Builder addPublisher(String value);
/** Add a value to property publishingPrinciples. */
Builder addPublishingPrinciples(URL value);
/** Add a value to property publishingPrinciples. */
Builder addPublishingPrinciples(String value);
/** Add a value to property quest. */
Builder addQuest(Thing value);
/** Add a value to property quest. */
Builder addQuest(Thing.Builder value);
/** Add a value to property quest. */
Builder addQuest(String value);
/** Add a value to property recordedAt. */
Builder addRecordedAt(Event value);
/** Add a value to property recordedAt. */
Builder addRecordedAt(Event.Builder value);
/** Add a value to property recordedAt. */
Builder addRecordedAt(String value);
/** Add a value to property releasedEvent. */
Builder addReleasedEvent(PublicationEvent value);
/** Add a value to property releasedEvent. */
Builder addReleasedEvent(PublicationEvent.Builder value);
/** Add a value to property releasedEvent. */
Builder addReleasedEvent(String value);
/** Add a value to property review. */
Builder addReview(Review value);
/** Add a value to property review. */
Builder addReview(Review.Builder value);
/** Add a value to property review. */
Builder addReview(String value);
/** Add a value to property reviews. */
Builder addReviews(Review value);
/** Add a value to property reviews. */
Builder addReviews(Review.Builder value);
/** Add a value to property reviews. */
Builder addReviews(String value);
/** Add a value to property sameAs. */
Builder addSameAs(URL value);
/** Add a value to property sameAs. */
Builder addSameAs(String value);
/** Add a value to property schemaVersion. */
Builder addSchemaVersion(Text value);
/** Add a value to property schemaVersion. */
Builder addSchemaVersion(URL value);
/** Add a value to property schemaVersion. */
Builder addSchemaVersion(String value);
/** Add a value to property season. */
Builder addSeason(CreativeWorkSeason value);
/** Add a value to property season. */
Builder addSeason(CreativeWorkSeason.Builder value);
/** Add a value to property season. */
Builder addSeason(String value);
/** Add a value to property seasons. */
Builder addSeasons(CreativeWorkSeason value);
/** Add a value to property seasons. */
Builder addSeasons(CreativeWorkSeason.Builder value);
/** Add a value to property seasons. */
Builder addSeasons(String value);
/** Add a value to property sourceOrganization. */
Builder addSourceOrganization(Organization value);
/** Add a value to property sourceOrganization. */
Builder addSourceOrganization(Organization.Builder value);
/** Add a value to property sourceOrganization. */
Builder addSourceOrganization(String value);
/** Add a value to property startDate. */
Builder addStartDate(Date value);
/** Add a value to property startDate. */
Builder addStartDate(String value);
/** Add a value to property text. */
Builder addText(Text value);
/** Add a value to property text. */
Builder addText(String value);
/** Add a value to property thumbnailUrl. */
Builder addThumbnailUrl(URL value);
/** Add a value to property thumbnailUrl. */
Builder addThumbnailUrl(String value);
/** Add a value to property timeRequired. */
Builder addTimeRequired(Duration value);
/** Add a value to property timeRequired. */
Builder addTimeRequired(Duration.Builder value);
/** Add a value to property timeRequired. */
Builder addTimeRequired(String value);
/** Add a value to property trailer. */
Builder addTrailer(VideoObject value);
/** Add a value to property trailer. */
Builder addTrailer(VideoObject.Builder value);
/** Add a value to property trailer. */
Builder addTrailer(String value);
/** Add a value to property translator. */
Builder addTranslator(Organization value);
/** Add a value to property translator. */
Builder addTranslator(Organization.Builder value);
/** Add a value to property translator. */
Builder addTranslator(Person value);
/** Add a value to property translator. */
Builder addTranslator(Person.Builder value);
/** Add a value to property translator. */
Builder addTranslator(String value);
/** Add a value to property typicalAgeRange. */
Builder addTypicalAgeRange(Text value);
/** Add a value to property typicalAgeRange. */
Builder addTypicalAgeRange(String value);
/** Add a value to property url. */
Builder addUrl(URL value);
/** Add a value to property url. */
Builder addUrl(String value);
/** Add a value to property version. */
Builder addVersion(Number value);
/** Add a value to property version. */
Builder addVersion(String value);
/** Add a value to property video. */
Builder addVideo(VideoObject value);
/** Add a value to property video. */
Builder addVideo(VideoObject.Builder value);
/** Add a value to property video. */
Builder addVideo(String value);
/** Add a value to property workExample. */
Builder addWorkExample(CreativeWork value);
/** Add a value to property workExample. */
Builder addWorkExample(CreativeWork.Builder value);
/** Add a value to property workExample. */
Builder addWorkExample(String value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(Article.Builder value);
/** Add a value to property detailedDescription. */
Builder addDetailedDescription(String value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(PopularityScoreSpecification.Builder value);
/** Add a value to property popularityScore. */
Builder addPopularityScore(String value);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The value of the property.
*/
Builder addProperty(String name, SchemaOrgType value);
/**
* Add a value to property.
*
* @param name The property name.
* @param builder The schema.org object builder for the property value.
*/
Builder addProperty(String name, Thing.Builder builder);
/**
* Add a value to property.
*
* @param name The property name.
* @param value The string value of the property.
*/
Builder addProperty(String name, String value);
/** Build a {@link VideoGameSeries} object. */
VideoGameSeries build();
}
/**
* Returns the value list of property actor. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getActorList();
/**
* Returns the value list of property actors. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getActorsList();
/**
* Returns the value list of property characterAttribute. Empty list is returned if the property
* not set in current object.
*/
ImmutableList<SchemaOrgType> getCharacterAttributeList();
/**
* Returns the value list of property cheatCode. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getCheatCodeList();
/**
* Returns the value list of property containsSeason. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getContainsSeasonList();
/**
* Returns the value list of property director. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getDirectorList();
/**
* Returns the value list of property directors. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getDirectorsList();
/**
* Returns the value list of property episode. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getEpisodeList();
/**
* Returns the value list of property episodes. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getEpisodesList();
/**
* Returns the value list of property gameItem. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getGameItemList();
/**
* Returns the value list of property gameLocation. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getGameLocationList();
/**
* Returns the value list of property gamePlatform. Empty list is returned if the property not set
* in current object.
*/
ImmutableList<SchemaOrgType> getGamePlatformList();
/**
* Returns the value list of property musicBy. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getMusicByList();
/**
* Returns the value list of property numberOfEpisodes. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getNumberOfEpisodesList();
/**
* Returns the value list of property numberOfPlayers. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getNumberOfPlayersList();
/**
* Returns the value list of property numberOfSeasons. Empty list is returned if the property not
* set in current object.
*/
ImmutableList<SchemaOrgType> getNumberOfSeasonsList();
/**
* Returns the value list of property playMode. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getPlayModeList();
/**
* Returns the value list of property productionCompany. Empty list is returned if the property
* not set in current object.
*/
ImmutableList<SchemaOrgType> getProductionCompanyList();
/**
* Returns the value list of property quest. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getQuestList();
/**
* Returns the value list of property season. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getSeasonList();
/**
* Returns the value list of property seasons. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getSeasonsList();
/**
* Returns the value list of property trailer. Empty list is returned if the property not set in
* current object.
*/
ImmutableList<SchemaOrgType> getTrailerList();
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.